effect (string, 48 classes) | original_source_type (string, 0-23k chars) | opens_and_abbrevs (list, 2-92 items) | isa_cross_project_example (bool, 1 class) | source_definition (string, 9-57.9k chars) | partial_definition (string, 7-23.3k chars) | is_div (bool, 2 classes) | is_type (null) | is_proof (bool, 2 classes) | completed_definiton (string, 1-250k chars) | dependencies (dict) | effect_flags (sequence, 0-2 items) | ideal_premises (sequence, 0-236 items) | mutual_with (sequence, 0-11 items) | file_context (string, 0-407k chars) | interleaved (bool, 1 class) | is_simply_typed (bool, 2 classes) | file_name (string, 5-48 chars) | vconfig (dict) | is_simple_lemma (null) | source_type (string, 10-23k chars) | proof_features (sequence, 0-1 items) | name (string, 8-95 chars) | source (dict) | verbose_type (string, 1-7.42k chars) | source_range (dict) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Prims.Tot | val bn_mod_exp_consttime_precomp:
#t:limb_t
-> len:BN.meta_len t
-> bn_mod_exp_bm_consttime_precomp:bn_mod_exp_precomp_st t len
-> bn_mod_exp_fw_consttime_precomp:bn_mod_exp_precomp_st t len ->
bn_mod_exp_precomp_st t len | [
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Exponentiation",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.MontExponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Montgomery.Lemmas",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Exponentiation.Lemmas",
"short_module": "E"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontExponentiation",
"short_module": "AE"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.MontExponentiation",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontgomery",
"short_module": "AM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Montgomery",
"short_module": "BM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum",
"short_module": "BN"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Montgomery",
"short_module": "SM"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontgomery",
"short_module": "AM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Montgomery",
"short_module": "BM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum",
"short_module": "BN"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Exponentiation",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let bn_mod_exp_consttime_precomp #t len bn_mod_exp_bm_consttime_precomp bn_mod_exp_fw_consttime_precomp n mu r2 a bBits b res =
if bBits <. size SE.bn_exp_mont_consttime_threshold then
bn_mod_exp_bm_consttime_precomp n mu r2 a bBits b res
else
bn_mod_exp_fw_consttime_precomp n mu r2 a bBits b res | val bn_mod_exp_consttime_precomp:
#t:limb_t
-> len:BN.meta_len t
-> bn_mod_exp_bm_consttime_precomp:bn_mod_exp_precomp_st t len
-> bn_mod_exp_fw_consttime_precomp:bn_mod_exp_precomp_st t len ->
bn_mod_exp_precomp_st t len
let bn_mod_exp_consttime_precomp
#t
len
bn_mod_exp_bm_consttime_precomp
bn_mod_exp_fw_consttime_precomp
n
mu
r2
a
bBits
b
res
= | false | null | false | if bBits <. size SE.bn_exp_mont_consttime_threshold
then bn_mod_exp_bm_consttime_precomp n mu r2 a bBits b res
else bn_mod_exp_fw_consttime_precomp n mu r2 a bBits b res | {
"checked_file": "Hacl.Bignum.Exponentiation.fst.checked",
"dependencies": [
"prims.fst.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.Montgomery.Lemmas.fst.checked",
"Hacl.Spec.Exponentiation.Lemmas.fst.checked",
"Hacl.Spec.Bignum.Montgomery.fsti.checked",
"Hacl.Spec.Bignum.MontExponentiation.fst.checked",
"Hacl.Spec.Bignum.ModInvLimb.fsti.checked",
"Hacl.Spec.Bignum.Exponentiation.fst.checked",
"Hacl.Spec.Bignum.Exponentiation.fst.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"Hacl.Bignum.Montgomery.fst.checked",
"Hacl.Bignum.Montgomery.fst.checked",
"Hacl.Bignum.MontExponentiation.fst.checked",
"Hacl.Bignum.Definitions.fst.checked",
"Hacl.Bignum.AlmostMontgomery.fsti.checked",
"Hacl.Bignum.AlmostMontExponentiation.fst.checked",
"Hacl.Bignum.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": true,
"source_file": "Hacl.Bignum.Exponentiation.fst"
} | [
"total"
] | [
"Hacl.Bignum.Definitions.limb_t",
"Hacl.Bignum.meta_len",
"Hacl.Bignum.Exponentiation.bn_mod_exp_precomp_st",
"Hacl.Bignum.Definitions.lbignum",
"Hacl.Bignum.Definitions.limb",
"Lib.IntTypes.size_t",
"Hacl.Bignum.Definitions.blocks0",
"Lib.IntTypes.size",
"Lib.IntTypes.bits",
"Lib.IntTypes.op_Less_Dot",
"Lib.IntTypes.U32",
"Hacl.Spec.Bignum.MontExponentiation.bn_exp_mont_consttime_threshold",
"Prims.unit",
"Prims.bool"
] | [] | module Hacl.Bignum.Exponentiation
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum.Definitions
module ST = FStar.HyperStack.ST
module BD = Hacl.Spec.Bignum.Definitions
module SM = Hacl.Spec.Bignum.Montgomery
module BN = Hacl.Bignum
module BM = Hacl.Bignum.Montgomery
module AM = Hacl.Bignum.AlmostMontgomery
module ME = Hacl.Bignum.MontExponentiation
module AE = Hacl.Bignum.AlmostMontExponentiation
module E = Hacl.Spec.Exponentiation.Lemmas
module M = Hacl.Spec.Montgomery.Lemmas
module SE = Hacl.Spec.Bignum.MontExponentiation
module S = Hacl.Spec.Bignum.Exponentiation
friend Hacl.Spec.Bignum.Exponentiation
friend Hacl.Bignum.Montgomery
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
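(* bn_check_mod_exp: returns an all-ones mask iff the modulus n passes
   BM.bn_check_modulus, the exponent b fits in bBits bits, and a < n. *)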
let bn_check_mod_exp #t len n a bBits b =
let m0 = BM.bn_check_modulus n in
let bLen = blocks0 bBits (size (bits t)) in
let m1 =
if bBits <. size (bits t) *! bLen
then BN.bn_lt_pow2_mask bLen b bBits
else ones t SEC in
let m2 = BN.bn_lt_mask len a n in
let m = m1 &. m2 in
m0 &. m
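(* Generic precomputation-based modular exponentiation over a Montgomery
   context k: map a into the Montgomery domain, run the supplied bn_exp_mont,
   map the result back, and relate it to Lib.NatMod.pow_mod. *)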
inline_for_extraction noextract
val mk_bn_mod_exp_precomp_mont:
#t:limb_t
-> k:BM.mont t
-> bn_exp_mont: ME.bn_exp_mont_st t k.BM.bn.BN.len ->
bn_mod_exp_precomp_st t k.BM.bn.BN.len
let mk_bn_mod_exp_precomp_mont #t k bn_exp_mont n mu r2 a bBits b res =
let h0 = ST.get () in
[@inline_let] let len = k.BM.bn.BN.len in
push_frame ();
BD.bn_eval_bound (as_seq h0 n) (v len);
let aM = create len (uint #t #SEC 0) in
BM.to n mu r2 a aM;
SM.bn_to_mont_lemma (as_seq h0 n) mu (as_seq h0 r2) (as_seq h0 a);
let resM = create len (uint #t #SEC 0) in
bn_exp_mont n mu r2 aM bBits b resM;
BM.from n mu resM res;
let h1 = ST.get () in
SM.bn_from_mont_lemma (as_seq h0 n) mu (as_seq h1 resM);
E.mod_exp_mont_ll_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a) (bn_v h0 b);
assert (bn_v h1 res == Lib.NatMod.pow_mod #(bn_v h0 n) (bn_v h0 a) (bn_v h0 b));
pop_frame ()
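(* Same construction over an Almost Montgomery context (AM); the exponentiation
   result needs an extra bound lemma before the final correctness assertion. *)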
inline_for_extraction noextract
val mk_bn_mod_exp_precomp_amont:
#t:limb_t
-> k:AM.almost_mont t
-> bn_exp_amont: AE.bn_exp_almost_mont_st t k.AM.bn.BN.len ->
bn_mod_exp_precomp_st t k.AM.bn.BN.len
let mk_bn_mod_exp_precomp_amont #t k bn_exp_amont n mu r2 a bBits b res =
let h0 = ST.get () in
[@inline_let] let len = k.AM.bn.BN.len in
push_frame ();
BD.bn_eval_bound (as_seq h0 n) (v len);
let aM = create len (uint #t #SEC 0) in
AM.to n mu r2 a aM;
SM.bn_to_mont_lemma (as_seq h0 n) mu (as_seq h0 r2) (as_seq h0 a);
M.to_mont_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a);
let resM = create len (uint #t #SEC 0) in
bn_exp_amont n mu r2 aM bBits b resM;
AM.from n mu resM res;
let h1 = ST.get () in
SM.bn_from_mont_lemma (as_seq h0 n) mu (as_seq h1 resM);
BD.bn_eval_bound (as_seq h1 resM) (v len);
E.mod_exp_mont_ll_mod_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a) (bn_v h0 b) (bn_v h1 resM);
assert (bn_v h1 res == Lib.NatMod.pow_mod #(bn_v h0 n) (bn_v h0 a) (bn_v h0 b));
pop_frame ()
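(* Specializations: binary (bm) and fixed-window (fw) exponentiation, each in
   vartime and consttime variants, over Montgomery (default) or Almost
   Montgomery (amm) arithmetic. *)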
let bn_mod_exp_bm_vartime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_bm_vartime #t k) n mu r2 a bBits b res
let bn_mod_exp_bm_consttime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_bm_consttime #t k) n mu r2 a bBits b res
let bn_mod_exp_fw_vartime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_fw_vartime #t k l) n mu r2 a bBits b res
let bn_mod_exp_fw_consttime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_fw_consttime #t k l) n mu r2 a bBits b res
let bn_mod_exp_amm_bm_vartime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_bm_vartime #t k) n mu r2 a bBits b res
let bn_mod_exp_amm_bm_consttime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_bm_consttime #t k) n mu r2 a bBits b res
let bn_mod_exp_amm_fw_vartime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_fw_vartime #t k l) n mu r2 a bBits b res
let bn_mod_exp_amm_fw_consttime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_fw_consttime #t k l) n mu r2 a bBits b res | false | false | Hacl.Bignum.Exponentiation.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val bn_mod_exp_consttime_precomp:
#t:limb_t
-> len:BN.meta_len t
-> bn_mod_exp_bm_consttime_precomp:bn_mod_exp_precomp_st t len
-> bn_mod_exp_fw_consttime_precomp:bn_mod_exp_precomp_st t len ->
bn_mod_exp_precomp_st t len | [] | Hacl.Bignum.Exponentiation.bn_mod_exp_consttime_precomp | {
"file_name": "code/bignum/Hacl.Bignum.Exponentiation.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
len: Hacl.Bignum.meta_len t ->
bn_mod_exp_bm_consttime_precomp: Hacl.Bignum.Exponentiation.bn_mod_exp_precomp_st t len ->
bn_mod_exp_fw_consttime_precomp: Hacl.Bignum.Exponentiation.bn_mod_exp_precomp_st t len
-> Hacl.Bignum.Exponentiation.bn_mod_exp_precomp_st t len | {
"end_col": 57,
"end_line": 130,
"start_col": 2,
"start_line": 127
} |
Prims.Tot | val mk_bn_mod_exp_precompr2:
#t:limb_t
-> len:BN.meta_len t
-> bn_mod_exp_precomp:bn_mod_exp_precomp_st t len ->
bn_mod_exp_precompr2_st t len | [
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Exponentiation",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.MontExponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Montgomery.Lemmas",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Exponentiation.Lemmas",
"short_module": "E"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontExponentiation",
"short_module": "AE"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.MontExponentiation",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontgomery",
"short_module": "AM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Montgomery",
"short_module": "BM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum",
"short_module": "BN"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Montgomery",
"short_module": "SM"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontgomery",
"short_module": "AM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Montgomery",
"short_module": "BM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum",
"short_module": "BN"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Exponentiation",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mk_bn_mod_exp_precompr2 #t len bn_mod_exp_precomp n r2 a bBits b res =
let h0 = ST.get () in
let mu = BM.mod_inv_limb n.(0ul) in // n * mu = 1 (mod (pow2 64))
Hacl.Spec.Bignum.ModInvLimb.bn_mod_inv_limb_lemma (as_seq h0 n);
bn_mod_exp_precomp n mu r2 a bBits b res | val mk_bn_mod_exp_precompr2:
#t:limb_t
-> len:BN.meta_len t
-> bn_mod_exp_precomp:bn_mod_exp_precomp_st t len ->
bn_mod_exp_precompr2_st t len
let mk_bn_mod_exp_precompr2 #t len bn_mod_exp_precomp n r2 a bBits b res = | false | null | false | let h0 = ST.get () in
let mu = BM.mod_inv_limb n.(0ul) in
Hacl.Spec.Bignum.ModInvLimb.bn_mod_inv_limb_lemma (as_seq h0 n);
bn_mod_exp_precomp n mu r2 a bBits b res | {
"checked_file": "Hacl.Bignum.Exponentiation.fst.checked",
"dependencies": [
"prims.fst.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.Montgomery.Lemmas.fst.checked",
"Hacl.Spec.Exponentiation.Lemmas.fst.checked",
"Hacl.Spec.Bignum.Montgomery.fsti.checked",
"Hacl.Spec.Bignum.MontExponentiation.fst.checked",
"Hacl.Spec.Bignum.ModInvLimb.fsti.checked",
"Hacl.Spec.Bignum.Exponentiation.fst.checked",
"Hacl.Spec.Bignum.Exponentiation.fst.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"Hacl.Bignum.Montgomery.fst.checked",
"Hacl.Bignum.Montgomery.fst.checked",
"Hacl.Bignum.MontExponentiation.fst.checked",
"Hacl.Bignum.Definitions.fst.checked",
"Hacl.Bignum.AlmostMontgomery.fsti.checked",
"Hacl.Bignum.AlmostMontExponentiation.fst.checked",
"Hacl.Bignum.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": true,
"source_file": "Hacl.Bignum.Exponentiation.fst"
} | [
"total"
] | [
"Hacl.Bignum.Definitions.limb_t",
"Hacl.Bignum.meta_len",
"Hacl.Bignum.Exponentiation.bn_mod_exp_precomp_st",
"Hacl.Bignum.Definitions.lbignum",
"Lib.IntTypes.size_t",
"Hacl.Bignum.Definitions.blocks0",
"Lib.IntTypes.size",
"Lib.IntTypes.bits",
"Prims.unit",
"Hacl.Spec.Bignum.ModInvLimb.bn_mod_inv_limb_lemma",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Lib.Buffer.as_seq",
"Lib.Buffer.MUT",
"Hacl.Bignum.Definitions.limb",
"Hacl.Bignum.ModInvLimb.mod_inv_limb",
"Lib.Buffer.op_Array_Access",
"FStar.UInt32.__uint_to_t",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get"
] | [] | module Hacl.Bignum.Exponentiation
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum.Definitions
module ST = FStar.HyperStack.ST
module BD = Hacl.Spec.Bignum.Definitions
module SM = Hacl.Spec.Bignum.Montgomery
module BN = Hacl.Bignum
module BM = Hacl.Bignum.Montgomery
module AM = Hacl.Bignum.AlmostMontgomery
module ME = Hacl.Bignum.MontExponentiation
module AE = Hacl.Bignum.AlmostMontExponentiation
module E = Hacl.Spec.Exponentiation.Lemmas
module M = Hacl.Spec.Montgomery.Lemmas
module SE = Hacl.Spec.Bignum.MontExponentiation
module S = Hacl.Spec.Bignum.Exponentiation
friend Hacl.Spec.Bignum.Exponentiation
friend Hacl.Bignum.Montgomery
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let bn_check_mod_exp #t len n a bBits b =
let m0 = BM.bn_check_modulus n in
let bLen = blocks0 bBits (size (bits t)) in
let m1 =
if bBits <. size (bits t) *! bLen
then BN.bn_lt_pow2_mask bLen b bBits
else ones t SEC in
let m2 = BN.bn_lt_mask len a n in
let m = m1 &. m2 in
m0 &. m
inline_for_extraction noextract
val mk_bn_mod_exp_precomp_mont:
#t:limb_t
-> k:BM.mont t
-> bn_exp_mont: ME.bn_exp_mont_st t k.BM.bn.BN.len ->
bn_mod_exp_precomp_st t k.BM.bn.BN.len
let mk_bn_mod_exp_precomp_mont #t k bn_exp_mont n mu r2 a bBits b res =
let h0 = ST.get () in
[@inline_let] let len = k.BM.bn.BN.len in
push_frame ();
BD.bn_eval_bound (as_seq h0 n) (v len);
let aM = create len (uint #t #SEC 0) in
BM.to n mu r2 a aM;
SM.bn_to_mont_lemma (as_seq h0 n) mu (as_seq h0 r2) (as_seq h0 a);
let resM = create len (uint #t #SEC 0) in
bn_exp_mont n mu r2 aM bBits b resM;
BM.from n mu resM res;
let h1 = ST.get () in
SM.bn_from_mont_lemma (as_seq h0 n) mu (as_seq h1 resM);
E.mod_exp_mont_ll_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a) (bn_v h0 b);
assert (bn_v h1 res == Lib.NatMod.pow_mod #(bn_v h0 n) (bn_v h0 a) (bn_v h0 b));
pop_frame ()
inline_for_extraction noextract
val mk_bn_mod_exp_precomp_amont:
#t:limb_t
-> k:AM.almost_mont t
-> bn_exp_amont: AE.bn_exp_almost_mont_st t k.AM.bn.BN.len ->
bn_mod_exp_precomp_st t k.AM.bn.BN.len
let mk_bn_mod_exp_precomp_amont #t k bn_exp_amont n mu r2 a bBits b res =
let h0 = ST.get () in
[@inline_let] let len = k.AM.bn.BN.len in
push_frame ();
BD.bn_eval_bound (as_seq h0 n) (v len);
let aM = create len (uint #t #SEC 0) in
AM.to n mu r2 a aM;
SM.bn_to_mont_lemma (as_seq h0 n) mu (as_seq h0 r2) (as_seq h0 a);
M.to_mont_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a);
let resM = create len (uint #t #SEC 0) in
bn_exp_amont n mu r2 aM bBits b resM;
AM.from n mu resM res;
let h1 = ST.get () in
SM.bn_from_mont_lemma (as_seq h0 n) mu (as_seq h1 resM);
BD.bn_eval_bound (as_seq h1 resM) (v len);
E.mod_exp_mont_ll_mod_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a) (bn_v h0 b) (bn_v h1 resM);
assert (bn_v h1 res == Lib.NatMod.pow_mod #(bn_v h0 n) (bn_v h0 a) (bn_v h0 b));
pop_frame ()
let bn_mod_exp_bm_vartime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_bm_vartime #t k) n mu r2 a bBits b res
let bn_mod_exp_bm_consttime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_bm_consttime #t k) n mu r2 a bBits b res
let bn_mod_exp_fw_vartime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_fw_vartime #t k l) n mu r2 a bBits b res
let bn_mod_exp_fw_consttime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_fw_consttime #t k l) n mu r2 a bBits b res
let bn_mod_exp_amm_bm_vartime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_bm_vartime #t k) n mu r2 a bBits b res
let bn_mod_exp_amm_bm_consttime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_bm_consttime #t k) n mu r2 a bBits b res
let bn_mod_exp_amm_fw_vartime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_fw_vartime #t k l) n mu r2 a bBits b res
let bn_mod_exp_amm_fw_consttime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_fw_consttime #t k l) n mu r2 a bBits b res
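(* Dispatch on the exponent bit-size: below SE.bn_exp_mont_*_threshold the
   binary method is used, otherwise the fixed-window method. *)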
let bn_mod_exp_consttime_precomp #t len bn_mod_exp_bm_consttime_precomp bn_mod_exp_fw_consttime_precomp n mu r2 a bBits b res =
if bBits <. size SE.bn_exp_mont_consttime_threshold then
bn_mod_exp_bm_consttime_precomp n mu r2 a bBits b res
else
bn_mod_exp_fw_consttime_precomp n mu r2 a bBits b res
let bn_mod_exp_vartime_precomp #t len bn_mod_exp_bm_vartime_precomp bn_mod_exp_fw_vartime_precomp n mu r2 a bBits b res =
if bBits <. size SE.bn_exp_mont_vartime_threshold then
bn_mod_exp_bm_vartime_precomp n mu r2 a bBits b res
else
bn_mod_exp_fw_vartime_precomp n mu r2 a bBits b res | false | false | Hacl.Bignum.Exponentiation.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_bn_mod_exp_precompr2:
#t:limb_t
-> len:BN.meta_len t
-> bn_mod_exp_precomp:bn_mod_exp_precomp_st t len ->
bn_mod_exp_precompr2_st t len | [] | Hacl.Bignum.Exponentiation.mk_bn_mod_exp_precompr2 | {
"file_name": "code/bignum/Hacl.Bignum.Exponentiation.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
len: Hacl.Bignum.meta_len t ->
bn_mod_exp_precomp: Hacl.Bignum.Exponentiation.bn_mod_exp_precomp_st t len
-> Hacl.Bignum.Exponentiation.bn_mod_exp_precompr2_st t len | {
"end_col": 42,
"end_line": 144,
"start_col": 74,
"start_line": 140
} |
Prims.Tot | val bn_mod_exp_vartime_precomp:
#t:limb_t
-> len:BN.meta_len t
-> bn_mod_exp_bm_vartime_precomp:bn_mod_exp_precomp_st t len
-> bn_mod_exp_fw_vartime_precomp:bn_mod_exp_precomp_st t len ->
bn_mod_exp_precomp_st t len | [
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Exponentiation",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.MontExponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Montgomery.Lemmas",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Exponentiation.Lemmas",
"short_module": "E"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontExponentiation",
"short_module": "AE"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.MontExponentiation",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontgomery",
"short_module": "AM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Montgomery",
"short_module": "BM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum",
"short_module": "BN"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Montgomery",
"short_module": "SM"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontgomery",
"short_module": "AM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Montgomery",
"short_module": "BM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum",
"short_module": "BN"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Exponentiation",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let bn_mod_exp_vartime_precomp #t len bn_mod_exp_bm_vartime_precomp bn_mod_exp_fw_vartime_precomp n mu r2 a bBits b res =
if bBits <. size SE.bn_exp_mont_vartime_threshold then
bn_mod_exp_bm_vartime_precomp n mu r2 a bBits b res
else
bn_mod_exp_fw_vartime_precomp n mu r2 a bBits b res | val bn_mod_exp_vartime_precomp:
#t:limb_t
-> len:BN.meta_len t
-> bn_mod_exp_bm_vartime_precomp:bn_mod_exp_precomp_st t len
-> bn_mod_exp_fw_vartime_precomp:bn_mod_exp_precomp_st t len ->
bn_mod_exp_precomp_st t len
let bn_mod_exp_vartime_precomp
#t
len
bn_mod_exp_bm_vartime_precomp
bn_mod_exp_fw_vartime_precomp
n
mu
r2
a
bBits
b
res
= | false | null | false | if bBits <. size SE.bn_exp_mont_vartime_threshold
then bn_mod_exp_bm_vartime_precomp n mu r2 a bBits b res
else bn_mod_exp_fw_vartime_precomp n mu r2 a bBits b res | {
"checked_file": "Hacl.Bignum.Exponentiation.fst.checked",
"dependencies": [
"prims.fst.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.Montgomery.Lemmas.fst.checked",
"Hacl.Spec.Exponentiation.Lemmas.fst.checked",
"Hacl.Spec.Bignum.Montgomery.fsti.checked",
"Hacl.Spec.Bignum.MontExponentiation.fst.checked",
"Hacl.Spec.Bignum.ModInvLimb.fsti.checked",
"Hacl.Spec.Bignum.Exponentiation.fst.checked",
"Hacl.Spec.Bignum.Exponentiation.fst.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"Hacl.Bignum.Montgomery.fst.checked",
"Hacl.Bignum.Montgomery.fst.checked",
"Hacl.Bignum.MontExponentiation.fst.checked",
"Hacl.Bignum.Definitions.fst.checked",
"Hacl.Bignum.AlmostMontgomery.fsti.checked",
"Hacl.Bignum.AlmostMontExponentiation.fst.checked",
"Hacl.Bignum.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": true,
"source_file": "Hacl.Bignum.Exponentiation.fst"
} | [
"total"
] | [
"Hacl.Bignum.Definitions.limb_t",
"Hacl.Bignum.meta_len",
"Hacl.Bignum.Exponentiation.bn_mod_exp_precomp_st",
"Hacl.Bignum.Definitions.lbignum",
"Hacl.Bignum.Definitions.limb",
"Lib.IntTypes.size_t",
"Hacl.Bignum.Definitions.blocks0",
"Lib.IntTypes.size",
"Lib.IntTypes.bits",
"Lib.IntTypes.op_Less_Dot",
"Lib.IntTypes.U32",
"Hacl.Spec.Bignum.MontExponentiation.bn_exp_mont_vartime_threshold",
"Prims.unit",
"Prims.bool"
] | [] | module Hacl.Bignum.Exponentiation
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum.Definitions
module ST = FStar.HyperStack.ST
module BD = Hacl.Spec.Bignum.Definitions
module SM = Hacl.Spec.Bignum.Montgomery
module BN = Hacl.Bignum
module BM = Hacl.Bignum.Montgomery
module AM = Hacl.Bignum.AlmostMontgomery
module ME = Hacl.Bignum.MontExponentiation
module AE = Hacl.Bignum.AlmostMontExponentiation
module E = Hacl.Spec.Exponentiation.Lemmas
module M = Hacl.Spec.Montgomery.Lemmas
module SE = Hacl.Spec.Bignum.MontExponentiation
module S = Hacl.Spec.Bignum.Exponentiation
friend Hacl.Spec.Bignum.Exponentiation
friend Hacl.Bignum.Montgomery
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let bn_check_mod_exp #t len n a bBits b =
let m0 = BM.bn_check_modulus n in
let bLen = blocks0 bBits (size (bits t)) in
let m1 =
if bBits <. size (bits t) *! bLen
then BN.bn_lt_pow2_mask bLen b bBits
else ones t SEC in
let m2 = BN.bn_lt_mask len a n in
let m = m1 &. m2 in
m0 &. m
inline_for_extraction noextract
val mk_bn_mod_exp_precomp_mont:
#t:limb_t
-> k:BM.mont t
-> bn_exp_mont: ME.bn_exp_mont_st t k.BM.bn.BN.len ->
bn_mod_exp_precomp_st t k.BM.bn.BN.len
let mk_bn_mod_exp_precomp_mont #t k bn_exp_mont n mu r2 a bBits b res =
let h0 = ST.get () in
[@inline_let] let len = k.BM.bn.BN.len in
push_frame ();
BD.bn_eval_bound (as_seq h0 n) (v len);
let aM = create len (uint #t #SEC 0) in
BM.to n mu r2 a aM;
SM.bn_to_mont_lemma (as_seq h0 n) mu (as_seq h0 r2) (as_seq h0 a);
let resM = create len (uint #t #SEC 0) in
bn_exp_mont n mu r2 aM bBits b resM;
BM.from n mu resM res;
let h1 = ST.get () in
SM.bn_from_mont_lemma (as_seq h0 n) mu (as_seq h1 resM);
E.mod_exp_mont_ll_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a) (bn_v h0 b);
assert (bn_v h1 res == Lib.NatMod.pow_mod #(bn_v h0 n) (bn_v h0 a) (bn_v h0 b));
pop_frame ()
inline_for_extraction noextract
val mk_bn_mod_exp_precomp_amont:
#t:limb_t
-> k:AM.almost_mont t
-> bn_exp_amont: AE.bn_exp_almost_mont_st t k.AM.bn.BN.len ->
bn_mod_exp_precomp_st t k.AM.bn.BN.len
let mk_bn_mod_exp_precomp_amont #t k bn_exp_amont n mu r2 a bBits b res =
let h0 = ST.get () in
[@inline_let] let len = k.AM.bn.BN.len in
push_frame ();
BD.bn_eval_bound (as_seq h0 n) (v len);
let aM = create len (uint #t #SEC 0) in
AM.to n mu r2 a aM;
SM.bn_to_mont_lemma (as_seq h0 n) mu (as_seq h0 r2) (as_seq h0 a);
M.to_mont_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a);
let resM = create len (uint #t #SEC 0) in
bn_exp_amont n mu r2 aM bBits b resM;
AM.from n mu resM res;
let h1 = ST.get () in
SM.bn_from_mont_lemma (as_seq h0 n) mu (as_seq h1 resM);
BD.bn_eval_bound (as_seq h1 resM) (v len);
E.mod_exp_mont_ll_mod_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a) (bn_v h0 b) (bn_v h1 resM);
assert (bn_v h1 res == Lib.NatMod.pow_mod #(bn_v h0 n) (bn_v h0 a) (bn_v h0 b));
pop_frame ()
let bn_mod_exp_bm_vartime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_bm_vartime #t k) n mu r2 a bBits b res
let bn_mod_exp_bm_consttime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_bm_consttime #t k) n mu r2 a bBits b res
let bn_mod_exp_fw_vartime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_fw_vartime #t k l) n mu r2 a bBits b res
let bn_mod_exp_fw_consttime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_fw_consttime #t k l) n mu r2 a bBits b res
let bn_mod_exp_amm_bm_vartime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_bm_vartime #t k) n mu r2 a bBits b res
let bn_mod_exp_amm_bm_consttime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_bm_consttime #t k) n mu r2 a bBits b res
let bn_mod_exp_amm_fw_vartime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_fw_vartime #t k l) n mu r2 a bBits b res
let bn_mod_exp_amm_fw_consttime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_fw_consttime #t k l) n mu r2 a bBits b res
let bn_mod_exp_consttime_precomp #t len bn_mod_exp_bm_consttime_precomp bn_mod_exp_fw_consttime_precomp n mu r2 a bBits b res =
if bBits <. size SE.bn_exp_mont_consttime_threshold then
bn_mod_exp_bm_consttime_precomp n mu r2 a bBits b res
else
bn_mod_exp_fw_consttime_precomp n mu r2 a bBits b res | false | false | Hacl.Bignum.Exponentiation.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val bn_mod_exp_vartime_precomp:
#t:limb_t
-> len:BN.meta_len t
-> bn_mod_exp_bm_vartime_precomp:bn_mod_exp_precomp_st t len
-> bn_mod_exp_fw_vartime_precomp:bn_mod_exp_precomp_st t len ->
bn_mod_exp_precomp_st t len | [] | Hacl.Bignum.Exponentiation.bn_mod_exp_vartime_precomp | {
"file_name": "code/bignum/Hacl.Bignum.Exponentiation.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
len: Hacl.Bignum.meta_len t ->
bn_mod_exp_bm_vartime_precomp: Hacl.Bignum.Exponentiation.bn_mod_exp_precomp_st t len ->
bn_mod_exp_fw_vartime_precomp: Hacl.Bignum.Exponentiation.bn_mod_exp_precomp_st t len
-> Hacl.Bignum.Exponentiation.bn_mod_exp_precomp_st t len | {
"end_col": 55,
"end_line": 137,
"start_col": 2,
"start_line": 134
} |
Prims.Tot | val bn_check_mod_exp: #t:limb_t -> len:BN.meta_len t -> bn_check_mod_exp_st t len | [
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Exponentiation",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.MontExponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Montgomery.Lemmas",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Exponentiation.Lemmas",
"short_module": "E"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontExponentiation",
"short_module": "AE"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.MontExponentiation",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontgomery",
"short_module": "AM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Montgomery",
"short_module": "BM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum",
"short_module": "BN"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Montgomery",
"short_module": "SM"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontgomery",
"short_module": "AM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Montgomery",
"short_module": "BM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum",
"short_module": "BN"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Exponentiation",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let bn_check_mod_exp #t len n a bBits b =
let m0 = BM.bn_check_modulus n in
let bLen = blocks0 bBits (size (bits t)) in
let m1 =
if bBits <. size (bits t) *! bLen
then BN.bn_lt_pow2_mask bLen b bBits
else ones t SEC in
let m2 = BN.bn_lt_mask len a n in
let m = m1 &. m2 in
m0 &. m | val bn_check_mod_exp: #t:limb_t -> len:BN.meta_len t -> bn_check_mod_exp_st t len
let bn_check_mod_exp #t len n a bBits b = | false | null | false | let m0 = BM.bn_check_modulus n in
let bLen = blocks0 bBits (size (bits t)) in
let m1 = if bBits <. size (bits t) *! bLen then BN.bn_lt_pow2_mask bLen b bBits else ones t SEC in
let m2 = BN.bn_lt_mask len a n in
let m = m1 &. m2 in
m0 &. m | {
"checked_file": "Hacl.Bignum.Exponentiation.fst.checked",
"dependencies": [
"prims.fst.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.Montgomery.Lemmas.fst.checked",
"Hacl.Spec.Exponentiation.Lemmas.fst.checked",
"Hacl.Spec.Bignum.Montgomery.fsti.checked",
"Hacl.Spec.Bignum.MontExponentiation.fst.checked",
"Hacl.Spec.Bignum.ModInvLimb.fsti.checked",
"Hacl.Spec.Bignum.Exponentiation.fst.checked",
"Hacl.Spec.Bignum.Exponentiation.fst.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"Hacl.Bignum.Montgomery.fst.checked",
"Hacl.Bignum.Montgomery.fst.checked",
"Hacl.Bignum.MontExponentiation.fst.checked",
"Hacl.Bignum.Definitions.fst.checked",
"Hacl.Bignum.AlmostMontgomery.fsti.checked",
"Hacl.Bignum.AlmostMontExponentiation.fst.checked",
"Hacl.Bignum.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": true,
"source_file": "Hacl.Bignum.Exponentiation.fst"
} | [
"total"
] | [
"Hacl.Bignum.Definitions.limb_t",
"Hacl.Bignum.meta_len",
"Hacl.Bignum.Definitions.lbignum",
"Lib.IntTypes.size_t",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Mul.op_Star",
"Lib.IntTypes.bits",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Hacl.Bignum.Definitions.blocks0",
"Lib.IntTypes.size",
"Lib.IntTypes.max_size_t",
"Lib.IntTypes.op_Amp_Dot",
"Lib.IntTypes.SEC",
"Lib.IntTypes.int_t",
"Hacl.Bignum.Definitions.limb",
"Hacl.Bignum.bn_lt_mask",
"Lib.IntTypes.op_Less_Dot",
"Lib.IntTypes.op_Star_Bang",
"Hacl.Bignum.bn_lt_pow2_mask",
"Prims.bool",
"Lib.IntTypes.ones",
"Prims.eq2",
"Prims.int",
"Prims.l_or",
"Lib.IntTypes.range",
"Prims.l_and",
"Prims.op_GreaterThan",
"Prims.op_Subtraction",
"Prims.pow2",
"Prims.op_Multiply",
"Lib.IntTypes.mk_int",
"Hacl.Spec.Bignum.Definitions.blocks0",
"Hacl.Bignum.Montgomery.bn_check_modulus"
] | [] | module Hacl.Bignum.Exponentiation
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum.Definitions
module ST = FStar.HyperStack.ST
module BD = Hacl.Spec.Bignum.Definitions
module SM = Hacl.Spec.Bignum.Montgomery
module BN = Hacl.Bignum
module BM = Hacl.Bignum.Montgomery
module AM = Hacl.Bignum.AlmostMontgomery
module ME = Hacl.Bignum.MontExponentiation
module AE = Hacl.Bignum.AlmostMontExponentiation
module E = Hacl.Spec.Exponentiation.Lemmas
module M = Hacl.Spec.Montgomery.Lemmas
module SE = Hacl.Spec.Bignum.MontExponentiation
module S = Hacl.Spec.Bignum.Exponentiation
friend Hacl.Spec.Bignum.Exponentiation
friend Hacl.Bignum.Montgomery
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0" | false | false | Hacl.Bignum.Exponentiation.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val bn_check_mod_exp: #t:limb_t -> len:BN.meta_len t -> bn_check_mod_exp_st t len | [] | Hacl.Bignum.Exponentiation.bn_check_mod_exp | {
"file_name": "code/bignum/Hacl.Bignum.Exponentiation.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | len: Hacl.Bignum.meta_len t -> Hacl.Bignum.Exponentiation.bn_check_mod_exp_st t len | {
"end_col": 9,
"end_line": 42,
"start_col": 41,
"start_line": 33
} |
Prims.Tot | val mk_bn_mod_exp:
#t:limb_t
-> len:BN.meta_len t
-> precomp_r2:BM.bn_precomp_r2_mod_n_st t len
-> bn_mod_exp_precomp:bn_mod_exp_precomp_st t len ->
bn_mod_exp_st t len | [
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Exponentiation",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.MontExponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Montgomery.Lemmas",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Exponentiation.Lemmas",
"short_module": "E"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontExponentiation",
"short_module": "AE"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.MontExponentiation",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontgomery",
"short_module": "AM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Montgomery",
"short_module": "BM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum",
"short_module": "BN"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Montgomery",
"short_module": "SM"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontgomery",
"short_module": "AM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Montgomery",
"short_module": "BM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum",
"short_module": "BN"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Exponentiation",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mk_bn_mod_exp #t len precomp_r2 bn_mod_exp_precomp nBits n a bBits b res =
push_frame ();
let r2 = create len (uint #t #SEC 0) in
let mu = BM.bn_mont_precomp len precomp_r2 nBits n r2 in
bn_mod_exp_precomp n mu r2 a bBits b res;
pop_frame () | val mk_bn_mod_exp:
#t:limb_t
-> len:BN.meta_len t
-> precomp_r2:BM.bn_precomp_r2_mod_n_st t len
-> bn_mod_exp_precomp:bn_mod_exp_precomp_st t len ->
bn_mod_exp_st t len
let mk_bn_mod_exp #t len precomp_r2 bn_mod_exp_precomp nBits n a bBits b res = | false | null | false | push_frame ();
let r2 = create len (uint #t #SEC 0) in
let mu = BM.bn_mont_precomp len precomp_r2 nBits n r2 in
bn_mod_exp_precomp n mu r2 a bBits b res;
pop_frame () | {
"checked_file": "Hacl.Bignum.Exponentiation.fst.checked",
"dependencies": [
"prims.fst.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.Montgomery.Lemmas.fst.checked",
"Hacl.Spec.Exponentiation.Lemmas.fst.checked",
"Hacl.Spec.Bignum.Montgomery.fsti.checked",
"Hacl.Spec.Bignum.MontExponentiation.fst.checked",
"Hacl.Spec.Bignum.ModInvLimb.fsti.checked",
"Hacl.Spec.Bignum.Exponentiation.fst.checked",
"Hacl.Spec.Bignum.Exponentiation.fst.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"Hacl.Bignum.Montgomery.fst.checked",
"Hacl.Bignum.Montgomery.fst.checked",
"Hacl.Bignum.MontExponentiation.fst.checked",
"Hacl.Bignum.Definitions.fst.checked",
"Hacl.Bignum.AlmostMontgomery.fsti.checked",
"Hacl.Bignum.AlmostMontExponentiation.fst.checked",
"Hacl.Bignum.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": true,
"source_file": "Hacl.Bignum.Exponentiation.fst"
} | [
"total"
] | [
"Hacl.Bignum.Definitions.limb_t",
"Hacl.Bignum.meta_len",
"Hacl.Bignum.Montgomery.bn_precomp_r2_mod_n_st",
"Hacl.Bignum.Exponentiation.bn_mod_exp_precomp_st",
"Lib.IntTypes.size_t",
"Hacl.Bignum.Definitions.lbignum",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Mul.op_Star",
"Lib.IntTypes.bits",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Hacl.Bignum.Definitions.blocks0",
"Lib.IntTypes.size",
"Lib.IntTypes.max_size_t",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Bignum.Definitions.limb",
"Hacl.Bignum.Montgomery.bn_mont_precomp",
"Lib.Buffer.lbuffer_t",
"Lib.Buffer.MUT",
"Lib.Buffer.create",
"Lib.IntTypes.uint",
"Lib.IntTypes.SEC",
"Lib.Buffer.lbuffer",
"FStar.HyperStack.ST.push_frame"
] | [] | module Hacl.Bignum.Exponentiation
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum.Definitions
module ST = FStar.HyperStack.ST
module BD = Hacl.Spec.Bignum.Definitions
module SM = Hacl.Spec.Bignum.Montgomery
module BN = Hacl.Bignum
module BM = Hacl.Bignum.Montgomery
module AM = Hacl.Bignum.AlmostMontgomery
module ME = Hacl.Bignum.MontExponentiation
module AE = Hacl.Bignum.AlmostMontExponentiation
module E = Hacl.Spec.Exponentiation.Lemmas
module M = Hacl.Spec.Montgomery.Lemmas
module SE = Hacl.Spec.Bignum.MontExponentiation
module S = Hacl.Spec.Bignum.Exponentiation
friend Hacl.Spec.Bignum.Exponentiation
friend Hacl.Bignum.Montgomery
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let bn_check_mod_exp #t len n a bBits b =
let m0 = BM.bn_check_modulus n in
let bLen = blocks0 bBits (size (bits t)) in
let m1 =
if bBits <. size (bits t) *! bLen
then BN.bn_lt_pow2_mask bLen b bBits
else ones t SEC in
let m2 = BN.bn_lt_mask len a n in
let m = m1 &. m2 in
m0 &. m
inline_for_extraction noextract
val mk_bn_mod_exp_precomp_mont:
#t:limb_t
-> k:BM.mont t
-> bn_exp_mont: ME.bn_exp_mont_st t k.BM.bn.BN.len ->
bn_mod_exp_precomp_st t k.BM.bn.BN.len
let mk_bn_mod_exp_precomp_mont #t k bn_exp_mont n mu r2 a bBits b res =
let h0 = ST.get () in
[@inline_let] let len = k.BM.bn.BN.len in
push_frame ();
BD.bn_eval_bound (as_seq h0 n) (v len);
let aM = create len (uint #t #SEC 0) in
BM.to n mu r2 a aM;
SM.bn_to_mont_lemma (as_seq h0 n) mu (as_seq h0 r2) (as_seq h0 a);
let resM = create len (uint #t #SEC 0) in
bn_exp_mont n mu r2 aM bBits b resM;
BM.from n mu resM res;
let h1 = ST.get () in
SM.bn_from_mont_lemma (as_seq h0 n) mu (as_seq h1 resM);
E.mod_exp_mont_ll_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a) (bn_v h0 b);
assert (bn_v h1 res == Lib.NatMod.pow_mod #(bn_v h0 n) (bn_v h0 a) (bn_v h0 b));
pop_frame ()
inline_for_extraction noextract
val mk_bn_mod_exp_precomp_amont:
#t:limb_t
-> k:AM.almost_mont t
-> bn_exp_amont: AE.bn_exp_almost_mont_st t k.AM.bn.BN.len ->
bn_mod_exp_precomp_st t k.AM.bn.BN.len
let mk_bn_mod_exp_precomp_amont #t k bn_exp_amont n mu r2 a bBits b res =
let h0 = ST.get () in
[@inline_let] let len = k.AM.bn.BN.len in
push_frame ();
BD.bn_eval_bound (as_seq h0 n) (v len);
let aM = create len (uint #t #SEC 0) in
AM.to n mu r2 a aM;
SM.bn_to_mont_lemma (as_seq h0 n) mu (as_seq h0 r2) (as_seq h0 a);
M.to_mont_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a);
let resM = create len (uint #t #SEC 0) in
bn_exp_amont n mu r2 aM bBits b resM;
AM.from n mu resM res;
let h1 = ST.get () in
SM.bn_from_mont_lemma (as_seq h0 n) mu (as_seq h1 resM);
BD.bn_eval_bound (as_seq h1 resM) (v len);
E.mod_exp_mont_ll_mod_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a) (bn_v h0 b) (bn_v h1 resM);
assert (bn_v h1 res == Lib.NatMod.pow_mod #(bn_v h0 n) (bn_v h0 a) (bn_v h0 b));
pop_frame ()
let bn_mod_exp_bm_vartime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_bm_vartime #t k) n mu r2 a bBits b res
let bn_mod_exp_bm_consttime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_bm_consttime #t k) n mu r2 a bBits b res
let bn_mod_exp_fw_vartime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_fw_vartime #t k l) n mu r2 a bBits b res
let bn_mod_exp_fw_consttime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_mont #t k (ME.bn_exp_mont_fw_consttime #t k l) n mu r2 a bBits b res
let bn_mod_exp_amm_bm_vartime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_bm_vartime #t k) n mu r2 a bBits b res
let bn_mod_exp_amm_bm_consttime_precomp #t k n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_bm_consttime #t k) n mu r2 a bBits b res
let bn_mod_exp_amm_fw_vartime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_fw_vartime #t k l) n mu r2 a bBits b res
let bn_mod_exp_amm_fw_consttime_precomp #t k l n mu r2 a bBits b res =
mk_bn_mod_exp_precomp_amont #t k (AE.bn_exp_almost_mont_fw_consttime #t k l) n mu r2 a bBits b res
let bn_mod_exp_consttime_precomp #t len bn_mod_exp_bm_consttime_precomp bn_mod_exp_fw_consttime_precomp n mu r2 a bBits b res =
if bBits <. size SE.bn_exp_mont_consttime_threshold then
bn_mod_exp_bm_consttime_precomp n mu r2 a bBits b res
else
bn_mod_exp_fw_consttime_precomp n mu r2 a bBits b res
let bn_mod_exp_vartime_precomp #t len bn_mod_exp_bm_vartime_precomp bn_mod_exp_fw_vartime_precomp n mu r2 a bBits b res =
if bBits <. size SE.bn_exp_mont_vartime_threshold then
bn_mod_exp_bm_vartime_precomp n mu r2 a bBits b res
else
bn_mod_exp_fw_vartime_precomp n mu r2 a bBits b res
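(* Variant taking a precomputed Montgomery constant r2; mu is derived on the
   fly with BM.mod_inv_limb before calling the precomp-based routine. *)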
let mk_bn_mod_exp_precompr2 #t len bn_mod_exp_precomp n r2 a bBits b res =
let h0 = ST.get () in
let mu = BM.mod_inv_limb n.(0ul) in // n * mu = 1 (mod (pow2 64))
Hacl.Spec.Bignum.ModInvLimb.bn_mod_inv_limb_lemma (as_seq h0 n);
bn_mod_exp_precomp n mu r2 a bBits b res | false | false | Hacl.Bignum.Exponentiation.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_bn_mod_exp:
#t:limb_t
-> len:BN.meta_len t
-> precomp_r2:BM.bn_precomp_r2_mod_n_st t len
-> bn_mod_exp_precomp:bn_mod_exp_precomp_st t len ->
bn_mod_exp_st t len | [] | Hacl.Bignum.Exponentiation.mk_bn_mod_exp | {
"file_name": "code/bignum/Hacl.Bignum.Exponentiation.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
len: Hacl.Bignum.meta_len t ->
precomp_r2: Hacl.Bignum.Montgomery.bn_precomp_r2_mod_n_st t len ->
bn_mod_exp_precomp: Hacl.Bignum.Exponentiation.bn_mod_exp_precomp_st t len
-> Hacl.Bignum.Exponentiation.bn_mod_exp_st t len | {
"end_col": 14,
"end_line": 152,
"start_col": 2,
"start_line": 148
} |
Prims.Tot | val mk_bn_mod_exp_precomp_mont:
#t:limb_t
-> k:BM.mont t
-> bn_exp_mont: ME.bn_exp_mont_st t k.BM.bn.BN.len ->
bn_mod_exp_precomp_st t k.BM.bn.BN.len | [
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Exponentiation",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.MontExponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Montgomery.Lemmas",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Exponentiation.Lemmas",
"short_module": "E"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontExponentiation",
"short_module": "AE"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.MontExponentiation",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontgomery",
"short_module": "AM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Montgomery",
"short_module": "BM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum",
"short_module": "BN"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Montgomery",
"short_module": "SM"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontgomery",
"short_module": "AM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Montgomery",
"short_module": "BM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum",
"short_module": "BN"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Exponentiation",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mk_bn_mod_exp_precomp_mont #t k bn_exp_mont n mu r2 a bBits b res =
let h0 = ST.get () in
[@inline_let] let len = k.BM.bn.BN.len in
push_frame ();
BD.bn_eval_bound (as_seq h0 n) (v len);
let aM = create len (uint #t #SEC 0) in
BM.to n mu r2 a aM;
SM.bn_to_mont_lemma (as_seq h0 n) mu (as_seq h0 r2) (as_seq h0 a);
let resM = create len (uint #t #SEC 0) in
bn_exp_mont n mu r2 aM bBits b resM;
BM.from n mu resM res;
let h1 = ST.get () in
SM.bn_from_mont_lemma (as_seq h0 n) mu (as_seq h1 resM);
E.mod_exp_mont_ll_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a) (bn_v h0 b);
assert (bn_v h1 res == Lib.NatMod.pow_mod #(bn_v h0 n) (bn_v h0 a) (bn_v h0 b));
pop_frame () | val mk_bn_mod_exp_precomp_mont:
#t:limb_t
-> k:BM.mont t
-> bn_exp_mont: ME.bn_exp_mont_st t k.BM.bn.BN.len ->
bn_mod_exp_precomp_st t k.BM.bn.BN.len
let mk_bn_mod_exp_precomp_mont #t k bn_exp_mont n mu r2 a bBits b res = | false | null | false | let h0 = ST.get () in
[@@ inline_let ]let len = k.BM.bn.BN.len in
push_frame ();
BD.bn_eval_bound (as_seq h0 n) (v len);
let aM = create len (uint #t #SEC 0) in
BM.to n mu r2 a aM;
SM.bn_to_mont_lemma (as_seq h0 n) mu (as_seq h0 r2) (as_seq h0 a);
let resM = create len (uint #t #SEC 0) in
bn_exp_mont n mu r2 aM bBits b resM;
BM.from n mu resM res;
let h1 = ST.get () in
SM.bn_from_mont_lemma (as_seq h0 n) mu (as_seq h1 resM);
E.mod_exp_mont_ll_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a) (bn_v h0 b);
assert (bn_v h1 res == Lib.NatMod.pow_mod #(bn_v h0 n) (bn_v h0 a) (bn_v h0 b));
pop_frame () | {
"checked_file": "Hacl.Bignum.Exponentiation.fst.checked",
"dependencies": [
"prims.fst.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.Montgomery.Lemmas.fst.checked",
"Hacl.Spec.Exponentiation.Lemmas.fst.checked",
"Hacl.Spec.Bignum.Montgomery.fsti.checked",
"Hacl.Spec.Bignum.MontExponentiation.fst.checked",
"Hacl.Spec.Bignum.ModInvLimb.fsti.checked",
"Hacl.Spec.Bignum.Exponentiation.fst.checked",
"Hacl.Spec.Bignum.Exponentiation.fst.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"Hacl.Bignum.Montgomery.fst.checked",
"Hacl.Bignum.Montgomery.fst.checked",
"Hacl.Bignum.MontExponentiation.fst.checked",
"Hacl.Bignum.Definitions.fst.checked",
"Hacl.Bignum.AlmostMontgomery.fsti.checked",
"Hacl.Bignum.AlmostMontExponentiation.fst.checked",
"Hacl.Bignum.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": true,
"source_file": "Hacl.Bignum.Exponentiation.fst"
} | [
"total"
] | [
"Hacl.Bignum.Definitions.limb_t",
"Hacl.Bignum.Montgomery.mont",
"Hacl.Bignum.MontExponentiation.bn_exp_mont_st",
"Hacl.Bignum.__proj__Mkbn__item__len",
"Hacl.Bignum.Montgomery.__proj__Mkmont__item__bn",
"Hacl.Bignum.Definitions.lbignum",
"Hacl.Bignum.Definitions.limb",
"Lib.IntTypes.size_t",
"Hacl.Bignum.Definitions.blocks0",
"Lib.IntTypes.size",
"Lib.IntTypes.bits",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Prims._assert",
"Prims.eq2",
"Prims.nat",
"Hacl.Bignum.Definitions.bn_v",
"Lib.NatMod.pow_mod",
"Hacl.Spec.Exponentiation.Lemmas.mod_exp_mont_ll_lemma",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Lib.IntTypes.SEC",
"Hacl.Spec.Bignum.Montgomery.bn_from_mont_lemma",
"Lib.Buffer.as_seq",
"Lib.Buffer.MUT",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"Hacl.Bignum.Montgomery.from",
"Lib.Buffer.lbuffer_t",
"Lib.Buffer.create",
"Lib.IntTypes.uint",
"Lib.Buffer.lbuffer",
"Hacl.Spec.Bignum.Montgomery.bn_to_mont_lemma",
"Hacl.Bignum.Montgomery.to",
"Hacl.Spec.Bignum.Definitions.bn_eval_bound",
"FStar.HyperStack.ST.push_frame",
"Hacl.Bignum.meta_len"
] | [] | module Hacl.Bignum.Exponentiation
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum.Definitions
module ST = FStar.HyperStack.ST
module BD = Hacl.Spec.Bignum.Definitions
module SM = Hacl.Spec.Bignum.Montgomery
module BN = Hacl.Bignum
module BM = Hacl.Bignum.Montgomery
module AM = Hacl.Bignum.AlmostMontgomery
module ME = Hacl.Bignum.MontExponentiation
module AE = Hacl.Bignum.AlmostMontExponentiation
module E = Hacl.Spec.Exponentiation.Lemmas
module M = Hacl.Spec.Montgomery.Lemmas
module SE = Hacl.Spec.Bignum.MontExponentiation
module S = Hacl.Spec.Bignum.Exponentiation
friend Hacl.Spec.Bignum.Exponentiation
friend Hacl.Bignum.Montgomery
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let bn_check_mod_exp #t len n a bBits b =
let m0 = BM.bn_check_modulus n in
let bLen = blocks0 bBits (size (bits t)) in
let m1 =
if bBits <. size (bits t) *! bLen
then BN.bn_lt_pow2_mask bLen b bBits
else ones t SEC in
let m2 = BN.bn_lt_mask len a n in
let m = m1 &. m2 in
m0 &. m
inline_for_extraction noextract
val mk_bn_mod_exp_precomp_mont:
#t:limb_t
-> k:BM.mont t
-> bn_exp_mont: ME.bn_exp_mont_st t k.BM.bn.BN.len ->
bn_mod_exp_precomp_st t k.BM.bn.BN.len | false | false | Hacl.Bignum.Exponentiation.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_bn_mod_exp_precomp_mont:
#t:limb_t
-> k:BM.mont t
-> bn_exp_mont: ME.bn_exp_mont_st t k.BM.bn.BN.len ->
bn_mod_exp_precomp_st t k.BM.bn.BN.len | [] | Hacl.Bignum.Exponentiation.mk_bn_mod_exp_precomp_mont | {
"file_name": "code/bignum/Hacl.Bignum.Exponentiation.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
k: Hacl.Bignum.Montgomery.mont t ->
bn_exp_mont: Hacl.Bignum.MontExponentiation.bn_exp_mont_st t (Mkbn?.len (Mkmont?.bn k))
-> Hacl.Bignum.Exponentiation.bn_mod_exp_precomp_st t (Mkbn?.len (Mkmont?.bn k)) | {
"end_col": 14,
"end_line": 69,
"start_col": 71,
"start_line": 52
} |
Prims.Tot | val mk_bn_mod_exp_precomp_amont:
#t:limb_t
-> k:AM.almost_mont t
-> bn_exp_amont: AE.bn_exp_almost_mont_st t k.AM.bn.BN.len ->
bn_mod_exp_precomp_st t k.AM.bn.BN.len | [
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Exponentiation",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.MontExponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Montgomery.Lemmas",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Exponentiation.Lemmas",
"short_module": "E"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontExponentiation",
"short_module": "AE"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.MontExponentiation",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontgomery",
"short_module": "AM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Montgomery",
"short_module": "BM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum",
"short_module": "BN"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Montgomery",
"short_module": "SM"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.AlmostMontgomery",
"short_module": "AM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Montgomery",
"short_module": "BM"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum",
"short_module": "BN"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Exponentiation",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mk_bn_mod_exp_precomp_amont #t k bn_exp_amont n mu r2 a bBits b res =
let h0 = ST.get () in
[@inline_let] let len = k.AM.bn.BN.len in
push_frame ();
BD.bn_eval_bound (as_seq h0 n) (v len);
let aM = create len (uint #t #SEC 0) in
AM.to n mu r2 a aM;
SM.bn_to_mont_lemma (as_seq h0 n) mu (as_seq h0 r2) (as_seq h0 a);
M.to_mont_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a);
let resM = create len (uint #t #SEC 0) in
bn_exp_amont n mu r2 aM bBits b resM;
AM.from n mu resM res;
let h1 = ST.get () in
SM.bn_from_mont_lemma (as_seq h0 n) mu (as_seq h1 resM);
BD.bn_eval_bound (as_seq h1 resM) (v len);
E.mod_exp_mont_ll_mod_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a) (bn_v h0 b) (bn_v h1 resM);
assert (bn_v h1 res == Lib.NatMod.pow_mod #(bn_v h0 n) (bn_v h0 a) (bn_v h0 b));
pop_frame () | val mk_bn_mod_exp_precomp_amont:
#t:limb_t
-> k:AM.almost_mont t
-> bn_exp_amont: AE.bn_exp_almost_mont_st t k.AM.bn.BN.len ->
bn_mod_exp_precomp_st t k.AM.bn.BN.len
let mk_bn_mod_exp_precomp_amont #t k bn_exp_amont n mu r2 a bBits b res = | false | null | false | let h0 = ST.get () in
[@@ inline_let ]let len = k.AM.bn.BN.len in
push_frame ();
BD.bn_eval_bound (as_seq h0 n) (v len);
let aM = create len (uint #t #SEC 0) in
AM.to n mu r2 a aM;
SM.bn_to_mont_lemma (as_seq h0 n) mu (as_seq h0 r2) (as_seq h0 a);
M.to_mont_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a);
let resM = create len (uint #t #SEC 0) in
bn_exp_amont n mu r2 aM bBits b resM;
AM.from n mu resM res;
let h1 = ST.get () in
SM.bn_from_mont_lemma (as_seq h0 n) mu (as_seq h1 resM);
BD.bn_eval_bound (as_seq h1 resM) (v len);
E.mod_exp_mont_ll_mod_lemma (bits t)
(v len)
(bn_v h0 n)
(v mu)
(bn_v h0 a)
(bn_v h0 b)
(bn_v h1 resM);
assert (bn_v h1 res == Lib.NatMod.pow_mod #(bn_v h0 n) (bn_v h0 a) (bn_v h0 b));
pop_frame () | {
"checked_file": "Hacl.Bignum.Exponentiation.fst.checked",
"dependencies": [
"prims.fst.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.Montgomery.Lemmas.fst.checked",
"Hacl.Spec.Exponentiation.Lemmas.fst.checked",
"Hacl.Spec.Bignum.Montgomery.fsti.checked",
"Hacl.Spec.Bignum.MontExponentiation.fst.checked",
"Hacl.Spec.Bignum.ModInvLimb.fsti.checked",
"Hacl.Spec.Bignum.Exponentiation.fst.checked",
"Hacl.Spec.Bignum.Exponentiation.fst.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"Hacl.Bignum.Montgomery.fst.checked",
"Hacl.Bignum.Montgomery.fst.checked",
"Hacl.Bignum.MontExponentiation.fst.checked",
"Hacl.Bignum.Definitions.fst.checked",
"Hacl.Bignum.AlmostMontgomery.fsti.checked",
"Hacl.Bignum.AlmostMontExponentiation.fst.checked",
"Hacl.Bignum.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": true,
"source_file": "Hacl.Bignum.Exponentiation.fst"
} | [
"total"
] | [
"Hacl.Bignum.Definitions.limb_t",
"Hacl.Bignum.AlmostMontgomery.almost_mont",
"Hacl.Bignum.AlmostMontExponentiation.bn_exp_almost_mont_st",
"Hacl.Bignum.__proj__Mkbn__item__len",
"Hacl.Bignum.AlmostMontgomery.__proj__Mkalmost_mont__item__bn",
"Hacl.Bignum.Definitions.lbignum",
"Hacl.Bignum.Definitions.limb",
"Lib.IntTypes.size_t",
"Hacl.Bignum.Definitions.blocks0",
"Lib.IntTypes.size",
"Lib.IntTypes.bits",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Prims._assert",
"Prims.eq2",
"Prims.nat",
"Hacl.Bignum.Definitions.bn_v",
"Lib.NatMod.pow_mod",
"Hacl.Spec.Exponentiation.Lemmas.mod_exp_mont_ll_mod_lemma",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Lib.IntTypes.SEC",
"Hacl.Spec.Bignum.Definitions.bn_eval_bound",
"Lib.Buffer.as_seq",
"Lib.Buffer.MUT",
"Hacl.Spec.Bignum.Montgomery.bn_from_mont_lemma",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"Hacl.Bignum.AlmostMontgomery.from",
"Lib.Buffer.lbuffer_t",
"Lib.Buffer.create",
"Lib.IntTypes.uint",
"Lib.Buffer.lbuffer",
"Hacl.Spec.Montgomery.Lemmas.to_mont_lemma",
"Hacl.Spec.Bignum.Montgomery.bn_to_mont_lemma",
"Hacl.Bignum.AlmostMontgomery.to",
"FStar.HyperStack.ST.push_frame",
"Hacl.Bignum.meta_len"
] | [] | module Hacl.Bignum.Exponentiation
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum.Definitions
module ST = FStar.HyperStack.ST
module BD = Hacl.Spec.Bignum.Definitions
module SM = Hacl.Spec.Bignum.Montgomery
module BN = Hacl.Bignum
module BM = Hacl.Bignum.Montgomery
module AM = Hacl.Bignum.AlmostMontgomery
module ME = Hacl.Bignum.MontExponentiation
module AE = Hacl.Bignum.AlmostMontExponentiation
module E = Hacl.Spec.Exponentiation.Lemmas
module M = Hacl.Spec.Montgomery.Lemmas
module SE = Hacl.Spec.Bignum.MontExponentiation
module S = Hacl.Spec.Bignum.Exponentiation
friend Hacl.Spec.Bignum.Exponentiation
friend Hacl.Bignum.Montgomery
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let bn_check_mod_exp #t len n a bBits b =
let m0 = BM.bn_check_modulus n in
let bLen = blocks0 bBits (size (bits t)) in
let m1 =
if bBits <. size (bits t) *! bLen
then BN.bn_lt_pow2_mask bLen b bBits
else ones t SEC in
let m2 = BN.bn_lt_mask len a n in
let m = m1 &. m2 in
m0 &. m
inline_for_extraction noextract
val mk_bn_mod_exp_precomp_mont:
#t:limb_t
-> k:BM.mont t
-> bn_exp_mont: ME.bn_exp_mont_st t k.BM.bn.BN.len ->
bn_mod_exp_precomp_st t k.BM.bn.BN.len
let mk_bn_mod_exp_precomp_mont #t k bn_exp_mont n mu r2 a bBits b res =
let h0 = ST.get () in
[@inline_let] let len = k.BM.bn.BN.len in
push_frame ();
BD.bn_eval_bound (as_seq h0 n) (v len);
let aM = create len (uint #t #SEC 0) in
BM.to n mu r2 a aM;
SM.bn_to_mont_lemma (as_seq h0 n) mu (as_seq h0 r2) (as_seq h0 a);
let resM = create len (uint #t #SEC 0) in
bn_exp_mont n mu r2 aM bBits b resM;
BM.from n mu resM res;
let h1 = ST.get () in
SM.bn_from_mont_lemma (as_seq h0 n) mu (as_seq h1 resM);
E.mod_exp_mont_ll_lemma (bits t) (v len) (bn_v h0 n) (v mu) (bn_v h0 a) (bn_v h0 b);
assert (bn_v h1 res == Lib.NatMod.pow_mod #(bn_v h0 n) (bn_v h0 a) (bn_v h0 b));
pop_frame ()
inline_for_extraction noextract
val mk_bn_mod_exp_precomp_amont:
#t:limb_t
-> k:AM.almost_mont t
-> bn_exp_amont: AE.bn_exp_almost_mont_st t k.AM.bn.BN.len ->
bn_mod_exp_precomp_st t k.AM.bn.BN.len | false | false | Hacl.Bignum.Exponentiation.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_bn_mod_exp_precomp_amont:
#t:limb_t
-> k:AM.almost_mont t
-> bn_exp_amont: AE.bn_exp_almost_mont_st t k.AM.bn.BN.len ->
bn_mod_exp_precomp_st t k.AM.bn.BN.len | [] | Hacl.Bignum.Exponentiation.mk_bn_mod_exp_precomp_amont | {
"file_name": "code/bignum/Hacl.Bignum.Exponentiation.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
k: Hacl.Bignum.AlmostMontgomery.almost_mont t ->
bn_exp_amont:
Hacl.Bignum.AlmostMontExponentiation.bn_exp_almost_mont_st t (Mkbn?.len (Mkalmost_mont?.bn k))
-> Hacl.Bignum.Exponentiation.bn_mod_exp_precomp_st t (Mkbn?.len (Mkalmost_mont?.bn k)) | {
"end_col": 14,
"end_line": 98,
"start_col": 73,
"start_line": 79
} |
Prims.Tot | val lift_dom: #a: _ -> #b: _ -> q: (a -> b) -> raise_t a -> b | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lift_dom #a #b (q:a -> b) : raise_t a -> b =
fun v -> q (downgrade_val v) | val lift_dom: #a: _ -> #b: _ -> q: (a -> b) -> raise_t a -> b
let lift_dom #a #b (q: (a -> b)) : raise_t a -> b = | false | null | false | fun v -> q (downgrade_val v) | {
"checked_file": "FStar.Universe.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Universe.fsti"
} | [
"total"
] | [
"FStar.Universe.raise_t",
"FStar.Universe.downgrade_val"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Universe
(** This module implements some basic facilities to raise the universe of a type *
* The type [raise_t a] is supposed to be isomorphic to [a] but in a higher *
* universe. The two functions [raise_val] and [downgrade_val] allow to coerce *
* from [a] to [raise_t a] and back. **)
(** [raise_t a] is an isomorphic copy of [a] (living in universe 'ua) in universe [max 'ua 'ub] **)
val raise_t ([@@@ strictly_positive] _ : Type u#a) : Type u#(max a b)
(** [raise_val x] injects a value [x] of type [a] to [raise_t a] **)
val raise_val : #a:Type u#a -> x:a -> raise_t u#a u#b a
(** [downgrade_val x] projects a value [x] of type [raise_t a] to [a] **)
val downgrade_val : #a:Type u#a -> x:raise_t u#a u#b a -> a
val downgrade_val_raise_val
(#a: Type u#a)
(x: a)
: Lemma
(downgrade_val u#a u#b (raise_val x) == x)
[SMTPat (downgrade_val u#a u#b (raise_val x))]
val raise_val_downgrade_val
(#a: Type u#a)
(x: raise_t u#a u#b a)
: Lemma
(raise_val (downgrade_val x) == x)
[SMTPat (raise_val u#a u#b (downgrade_val x))] | false | false | FStar.Universe.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lift_dom: #a: _ -> #b: _ -> q: (a -> b) -> raise_t a -> b | [] | FStar.Universe.lift_dom | {
"file_name": "ulib/FStar.Universe.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | q: (_: a -> b) -> _: FStar.Universe.raise_t a -> b | {
"end_col": 30,
"end_line": 48,
"start_col": 2,
"start_line": 48
} |
Prims.Tot | val lift_codom: #a: _ -> #b: _ -> q: (a -> b) -> a -> raise_t b | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lift_codom #a #b (q:a -> b) : a -> raise_t b =
fun v -> raise_val (q v) | val lift_codom: #a: _ -> #b: _ -> q: (a -> b) -> a -> raise_t b
let lift_codom #a #b (q: (a -> b)) : a -> raise_t b = | false | null | false | fun v -> raise_val (q v) | {
"checked_file": "FStar.Universe.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Universe.fsti"
} | [
"total"
] | [
"FStar.Universe.raise_val",
"FStar.Universe.raise_t"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Universe
(** This module implements some basic facilities to raise the universe of a type *
* The type [raise_t a] is supposed to be isomorphic to [a] but in a higher *
* universe. The two functions [raise_val] and [downgrade_val] allow to coerce *
* from [a] to [raise_t a] and back. **)
(** [raise_t a] is an isomorphic copy of [a] (living in universe 'ua) in universe [max 'ua 'ub] **)
val raise_t ([@@@ strictly_positive] _ : Type u#a) : Type u#(max a b)
(** [raise_val x] injects a value [x] of type [a] to [raise_t a] **)
val raise_val : #a:Type u#a -> x:a -> raise_t u#a u#b a
(** [downgrade_val x] projects a value [x] of type [raise_t a] to [a] **)
val downgrade_val : #a:Type u#a -> x:raise_t u#a u#b a -> a
val downgrade_val_raise_val
(#a: Type u#a)
(x: a)
: Lemma
(downgrade_val u#a u#b (raise_val x) == x)
[SMTPat (downgrade_val u#a u#b (raise_val x))]
val raise_val_downgrade_val
(#a: Type u#a)
(x: raise_t u#a u#b a)
: Lemma
(raise_val (downgrade_val x) == x)
[SMTPat (raise_val u#a u#b (downgrade_val x))]
let lift_dom #a #b (q:a -> b) : raise_t a -> b =
fun v -> q (downgrade_val v) | false | false | FStar.Universe.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lift_codom: #a: _ -> #b: _ -> q: (a -> b) -> a -> raise_t b | [] | FStar.Universe.lift_codom | {
"file_name": "ulib/FStar.Universe.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | q: (_: a -> b) -> _: a -> FStar.Universe.raise_t b | {
"end_col": 26,
"end_line": 51,
"start_col": 2,
"start_line": 51
} |
Prims.Tot | val va_wp_Inc32 (dst one: va_operand_xmm) (va_s0: va_state) (va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_is_dst_xmm dst va_s0 /\ va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\
va_eval_xmm va_s0 one == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0 /\ (forall
(va_x_dst:va_value_xmm) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl
(va_upd_operand_xmm dst va_x_dst va_s0) in va_get_ok va_sM /\ va_eval_xmm va_sM dst ==
Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 ==> va_k va_sM (()))) | val va_wp_Inc32 (dst one: va_operand_xmm) (va_s0: va_state) (va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Inc32 (dst one: va_operand_xmm) (va_s0: va_state) (va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_is_dst_xmm dst va_s0 /\ va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\
va_eval_xmm va_s0 one == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0 /\
(forall (va_x_dst: va_value_xmm) (va_x_efl: Vale.X64.Flags.t).
let va_sM = va_upd_flags va_x_efl (va_upd_operand_xmm dst va_x_dst va_s0) in
va_get_ok va_sM /\ va_eval_xmm va_sM dst == Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 ==>
va_k va_sM (()))) | {
"checked_file": "Vale.AES.X64.GCTR.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.AESCTRplain.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCTR.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_operand_xmm",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Vale.X64.Decls.va_is_dst_xmm",
"Vale.X64.Decls.va_is_src_xmm",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.eq2",
"Vale.Def.Words_s.four",
"Vale.Def.Types_s.nat32",
"Vale.X64.Decls.va_eval_xmm",
"Vale.Def.Words_s.Mkfour",
"Prims.l_Forall",
"Vale.X64.Decls.va_value_xmm",
"Vale.X64.Flags.t",
"Prims.l_imp",
"Vale.Def.Types_s.quad32",
"Vale.AES.GCTR_s.inc32",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_operand_xmm"
] | [] | module Vale.AES.X64.GCTR
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open FStar.Seq
open Vale.AES.AES_s
open Vale.AES.X64.AES
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM_helpers
open Vale.Poly1305.Math
open Vale.Def.Words.Two_s
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.AESCTRplain
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 30"
//-- Inc32
val va_code_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_code
val va_codegen_success_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_pbool
val va_lemma_Inc32 : va_b0:va_code -> va_s0:va_state -> dst:va_operand_xmm -> one:va_operand_xmm
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Inc32 dst one) va_s0 /\ va_is_dst_xmm dst va_s0 /\
va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\ va_eval_xmm va_s0 one ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_xmm va_sM dst == Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 /\ va_state_eq va_sM
(va_update_flags va_sM (va_update_ok va_sM (va_update_operand_xmm dst va_sM va_s0)))))
[@ va_qattr]
let va_wp_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) (va_s0:va_state) (va_k:(va_state -> unit | false | true | Vale.AES.X64.GCTR.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Inc32 (dst one: va_operand_xmm) (va_s0: va_state) (va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.AES.X64.GCTR.va_wp_Inc32 | {
"file_name": "obj/Vale.AES.X64.GCTR.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
dst: Vale.X64.Decls.va_operand_xmm ->
one: Vale.X64.Decls.va_operand_xmm ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 73,
"end_line": 51,
"start_col": 2,
"start_line": 47
} |
Prims.Tot | val va_quick_Inc32 (dst one: va_operand_xmm) : (va_quickCode unit (va_code_Inc32 dst one)) | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) : (va_quickCode unit (va_code_Inc32
dst one)) =
(va_QProc (va_code_Inc32 dst one) ([va_Mod_flags; va_mod_xmm dst]) (va_wp_Inc32 dst one)
(va_wpProof_Inc32 dst one)) | val va_quick_Inc32 (dst one: va_operand_xmm) : (va_quickCode unit (va_code_Inc32 dst one))
let va_quick_Inc32 (dst one: va_operand_xmm) : (va_quickCode unit (va_code_Inc32 dst one)) = | false | null | false | (va_QProc (va_code_Inc32 dst one)
([va_Mod_flags; va_mod_xmm dst])
(va_wp_Inc32 dst one)
(va_wpProof_Inc32 dst one)) | {
"checked_file": "Vale.AES.X64.GCTR.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.AESCTRplain.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCTR.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_operand_xmm",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCTR.va_code_Inc32",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_mod_xmm",
"Prims.Nil",
"Vale.AES.X64.GCTR.va_wp_Inc32",
"Vale.AES.X64.GCTR.va_wpProof_Inc32",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.AES.X64.GCTR
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open FStar.Seq
open Vale.AES.AES_s
open Vale.AES.X64.AES
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM_helpers
open Vale.Poly1305.Math
open Vale.Def.Words.Two_s
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.AESCTRplain
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 30"
//-- Inc32
val va_code_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_code
val va_codegen_success_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_pbool
val va_lemma_Inc32 : va_b0:va_code -> va_s0:va_state -> dst:va_operand_xmm -> one:va_operand_xmm
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Inc32 dst one) va_s0 /\ va_is_dst_xmm dst va_s0 /\
va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\ va_eval_xmm va_s0 one ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_xmm va_sM dst == Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 /\ va_state_eq va_sM
(va_update_flags va_sM (va_update_ok va_sM (va_update_operand_xmm dst va_sM va_s0)))))
[@ va_qattr]
let va_wp_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_is_dst_xmm dst va_s0 /\ va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\
va_eval_xmm va_s0 one == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0 /\ (forall
(va_x_dst:va_value_xmm) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl
(va_upd_operand_xmm dst va_x_dst va_s0) in va_get_ok va_sM /\ va_eval_xmm va_sM dst ==
Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 ==> va_k va_sM (())))
val va_wpProof_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> va_s0:va_state -> va_k:(va_state
-> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Inc32 dst one va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Inc32 dst one) ([va_Mod_flags;
va_mod_xmm dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) : (va_quickCode unit (va_code_Inc32 | false | false | Vale.AES.X64.GCTR.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Inc32 (dst one: va_operand_xmm) : (va_quickCode unit (va_code_Inc32 dst one)) | [] | Vale.AES.X64.GCTR.va_quick_Inc32 | {
"file_name": "obj/Vale.AES.X64.GCTR.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | dst: Vale.X64.Decls.va_operand_xmm -> one: Vale.X64.Decls.va_operand_xmm
-> Vale.X64.QuickCode.va_quickCode Prims.unit (Vale.AES.X64.GCTR.va_code_Inc32 dst one) | {
"end_col": 31,
"end_line": 63,
"start_col": 2,
"start_line": 62
} |
Prims.Tot | val va_quick_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_register alg)) | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b) (va_wpProof_Gctr_register alg
key round_keys keys_b)) | val va_quick_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_register alg))
let va_quick_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_register alg)) = | false | null | false | (va_QProc (va_code_Gctr_register alg)
([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0])
(va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b)) | {
"checked_file": "Vale.AES.X64.GCTR.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.AESCTRplain.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCTR.fsti"
} | [
"total"
] | [
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.quad32",
"Vale.X64.Memory.buffer128",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCTR.va_code_Gctr_register",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR12",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_xmm",
"Prims.Nil",
"Vale.AES.X64.GCTR.va_wp_Gctr_register",
"Vale.AES.X64.GCTR.va_wpProof_Gctr_register",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.AES.X64.GCTR
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open FStar.Seq
open Vale.AES.AES_s
open Vale.AES.X64.AES
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM_helpers
open Vale.Poly1305.Math
open Vale.Def.Words.Two_s
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.AESCTRplain
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 30"
//-- Inc32
val va_code_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_code
val va_codegen_success_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_pbool
val va_lemma_Inc32 : va_b0:va_code -> va_s0:va_state -> dst:va_operand_xmm -> one:va_operand_xmm
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Inc32 dst one) va_s0 /\ va_is_dst_xmm dst va_s0 /\
va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\ va_eval_xmm va_s0 one ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_xmm va_sM dst == Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 /\ va_state_eq va_sM
(va_update_flags va_sM (va_update_ok va_sM (va_update_operand_xmm dst va_sM va_s0)))))
[@ va_qattr]
let va_wp_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_is_dst_xmm dst va_s0 /\ va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\
va_eval_xmm va_s0 one == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0 /\ (forall
(va_x_dst:va_value_xmm) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl
(va_upd_operand_xmm dst va_x_dst va_s0) in va_get_ok va_sM /\ va_eval_xmm va_sM dst ==
Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 ==> va_k va_sM (())))
val va_wpProof_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> va_s0:va_state -> va_k:(va_state
-> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Inc32 dst one va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Inc32 dst one) ([va_Mod_flags;
va_mod_xmm dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) : (va_quickCode unit (va_code_Inc32
dst one)) =
(va_QProc (va_code_Inc32 dst one) ([va_Mod_flags; va_mod_xmm dst]) (va_wp_Inc32 dst one)
(va_wpProof_Inc32 dst one))
//--
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12
va_sM (va_update_flags va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_efl:Vale.X64.Flags.t)
(va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12 (va_upd_flags va_x_efl (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 va_s0)))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0,
va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32)) | false | false | Vale.AES.X64.GCTR.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_register alg)) | [] | Vale.AES.X64.GCTR.va_quick_Gctr_register | {
"file_name": "obj/Vale.AES.X64.GCTR.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128
-> Vale.X64.QuickCode.va_quickCode Prims.unit (Vale.AES.X64.GCTR.va_code_Gctr_register alg) | {
"end_col": 27,
"end_line": 119,
"start_col": 2,
"start_line": 117
} |
Prims.Tot | val va_wp_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_efl:Vale.X64.Flags.t)
(va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12 (va_upd_flags va_x_efl (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 va_s0)))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) ==> va_k va_sM (()))) | val va_wp_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\
FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\
va_get_reg64 rR8 va_s0 ==
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem_heaplet 0 va_s0) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0)
keys_b
(Vale.AES.AES_common_s.nr alg + 1)
(va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys) /\
(forall (va_x_xmm0: quad32)
(va_x_xmm1: quad32)
(va_x_xmm2: quad32)
(va_x_efl: Vale.X64.Flags.t)
(va_x_r12: nat64).
let va_sM =
va_upd_reg64 rR12
va_x_r12
(va_upd_flags va_x_efl
(va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 va_s0))))
in
va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM)
) ==
Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM)
(Vale.Def.Types_s.le_quad32_to_bytes (va_get_xmm 1 va_s0))
alg
key /\
va_get_xmm 1 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.AES.X64.GCTR.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.AESCTRplain.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCTR.fsti"
} | [
"total"
] | [
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.quad32",
"Vale.X64.Memory.buffer128",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.aesni_enabled",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.l_or",
"Prims.op_Equality",
"Vale.AES.AES_common_s.AES_128",
"Vale.AES.AES_common_s.AES_256",
"Vale.AES.AES_s.is_aes_key_LE",
"Prims.eq2",
"Prims.int",
"FStar.Seq.Base.length",
"Prims.op_Addition",
"Vale.AES.AES_common_s.nr",
"Vale.Def.Types_s.quad32",
"Vale.AES.AES_s.key_to_round_keys_LE",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Memory.buffer_addr",
"Vale.X64.Memory.vuint128",
"Vale.X64.Decls.va_get_mem_heaplet",
"Vale.X64.Decls.validSrcAddrs128",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.buffer128_as_seq",
"Prims.l_Forall",
"Vale.X64.Flags.t",
"Vale.X64.Memory.nat64",
"Prims.l_imp",
"Vale.Def.Types_s.nat8",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"FStar.Seq.Base.create",
"Vale.X64.Decls.va_get_xmm",
"Vale.AES.GCTR_s.gctr_encrypt_LE",
"Vale.Def.Types_s.le_quad32_to_bytes",
"Vale.AES.GCTR_s.gctr_encrypt_block",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_xmm"
] | [] | module Vale.AES.X64.GCTR
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open FStar.Seq
open Vale.AES.AES_s
open Vale.AES.X64.AES
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM_helpers
open Vale.Poly1305.Math
open Vale.Def.Words.Two_s
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.AESCTRplain
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 30"
//-- Inc32
val va_code_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_code
val va_codegen_success_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_pbool
val va_lemma_Inc32 : va_b0:va_code -> va_s0:va_state -> dst:va_operand_xmm -> one:va_operand_xmm
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Inc32 dst one) va_s0 /\ va_is_dst_xmm dst va_s0 /\
va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\ va_eval_xmm va_s0 one ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_xmm va_sM dst == Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 /\ va_state_eq va_sM
(va_update_flags va_sM (va_update_ok va_sM (va_update_operand_xmm dst va_sM va_s0)))))
[@ va_qattr]
let va_wp_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_is_dst_xmm dst va_s0 /\ va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\
va_eval_xmm va_s0 one == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0 /\ (forall
(va_x_dst:va_value_xmm) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl
(va_upd_operand_xmm dst va_x_dst va_s0) in va_get_ok va_sM /\ va_eval_xmm va_sM dst ==
Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 ==> va_k va_sM (())))
val va_wpProof_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> va_s0:va_state -> va_k:(va_state
-> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Inc32 dst one va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Inc32 dst one) ([va_Mod_flags;
va_mod_xmm dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) : (va_quickCode unit (va_code_Inc32
dst one)) =
(va_QProc (va_code_Inc32 dst one) ([va_Mod_flags; va_mod_xmm dst]) (va_wp_Inc32 dst one)
(va_wpProof_Inc32 dst one))
//--
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12
va_sM (va_update_flags va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32)) | false | true | Vale.AES.X64.GCTR.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.AES.X64.GCTR.va_wp_Gctr_register | {
"file_name": "obj/Vale.AES.X64.GCTR.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 78,
"end_line": 105,
"start_col": 2,
"start_line": 92
} |
Prims.Tot | val va_quick_Gctr_bytes_extra_work
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
: (va_quickCode unit (va_code_Gctr_bytes_extra_work alg)) | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128)
(out_b:buffer128) (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128)
(orig_in_ptr:nat64) (orig_out_ptr:nat64) (num_bytes:nat) : (va_quickCode unit
(va_code_Gctr_bytes_extra_work alg)) =
(va_QProc (va_code_Gctr_bytes_extra_work alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 4;
va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_mem])
(va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys keys_b orig_in_ptr
orig_out_ptr num_bytes) (va_wpProof_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes)) | val va_quick_Gctr_bytes_extra_work
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
: (va_quickCode unit (va_code_Gctr_bytes_extra_work alg))
let va_quick_Gctr_bytes_extra_work
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
: (va_quickCode unit (va_code_Gctr_bytes_extra_work alg)) = | false | null | false | (va_QProc (va_code_Gctr_bytes_extra_work alg)
([
va_Mod_flags;
va_Mod_mem_heaplet 1;
va_Mod_xmm 4;
va_Mod_xmm 2;
va_Mod_xmm 1;
va_Mod_xmm 0;
va_Mod_reg64 rR12;
va_Mod_reg64 rRdx;
va_Mod_mem
])
(va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys keys_b orig_in_ptr
orig_out_ptr num_bytes)
(va_wpProof_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys keys_b orig_in_ptr
orig_out_ptr num_bytes)) | {
"checked_file": "Vale.AES.X64.GCTR.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.AESCTRplain.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCTR.fsti"
} | [
"total"
] | [
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Decls.quad32",
"Vale.X64.Memory.buffer128",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Memory.nat64",
"Prims.nat",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCTR.va_code_Gctr_bytes_extra_work",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_mem_heaplet",
"Vale.X64.QuickCode.va_Mod_xmm",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.QuickCode.va_Mod_mem",
"Prims.Nil",
"Vale.AES.X64.GCTR.va_wp_Gctr_bytes_extra_work",
"Vale.AES.X64.GCTR.va_wpProof_Gctr_bytes_extra_work",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.AES.X64.GCTR
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open FStar.Seq
open Vale.AES.AES_s
open Vale.AES.X64.AES
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM_helpers
open Vale.Poly1305.Math
open Vale.Def.Words.Two_s
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.AESCTRplain
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 30"
//-- Inc32
val va_code_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_code
val va_codegen_success_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_pbool
val va_lemma_Inc32 : va_b0:va_code -> va_s0:va_state -> dst:va_operand_xmm -> one:va_operand_xmm
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Inc32 dst one) va_s0 /\ va_is_dst_xmm dst va_s0 /\
va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\ va_eval_xmm va_s0 one ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_xmm va_sM dst == Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 /\ va_state_eq va_sM
(va_update_flags va_sM (va_update_ok va_sM (va_update_operand_xmm dst va_sM va_s0)))))
[@ va_qattr]
let va_wp_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_is_dst_xmm dst va_s0 /\ va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\
va_eval_xmm va_s0 one == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0 /\ (forall
(va_x_dst:va_value_xmm) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl
(va_upd_operand_xmm dst va_x_dst va_s0) in va_get_ok va_sM /\ va_eval_xmm va_sM dst ==
Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 ==> va_k va_sM (())))
val va_wpProof_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> va_s0:va_state -> va_k:(va_state
-> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Inc32 dst one va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Inc32 dst one) ([va_Mod_flags;
va_mod_xmm dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) : (va_quickCode unit (va_code_Inc32
dst one)) =
(va_QProc (va_code_Inc32 dst one) ([va_Mod_flags; va_mod_xmm dst]) (va_wp_Inc32 dst one)
(va_wpProof_Inc32 dst one))
//--
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12
va_sM (va_update_flags va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_efl:Vale.X64.Flags.t)
(va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12 (va_upd_flags va_x_efl (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 va_s0)))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0,
va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b) (va_wpProof_Gctr_register alg
key round_keys keys_b))
//--
//-- Gctr_bytes_extra_work
val va_code_Gctr_bytes_extra_work : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_extra_work : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_bytes_extra_work : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->
icb_BE:quad32 -> in_b:buffer128 -> out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32)
-> keys_b:buffer128 -> orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_extra_work alg) va_s0 /\ va_get_ok va_s0 /\
(Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM
(va_update_xmm 4 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))
[@ va_qattr]
let va_wp_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr
in_b (Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks)) /\ (forall (va_x_mem:vale_heap)
(va_x_rdx:nat64) (va_x_r12:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm4:quad32) (va_x_heap1:vale_heap) (va_x_efl:Vale.X64.Flags.t) . let va_sM =
va_upd_flags va_x_efl (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRdx va_x_rdx (va_upd_mem va_x_mem va_s0)))))))) in va_get_ok va_sM /\ (let
num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Gctr_bytes_extra_work : alg:algorithm -> icb_BE:quad32 -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->
orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_bytes_extra_work alg)
([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 4; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128)
(out_b:buffer128) (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128)
(orig_in_ptr:nat64) (orig_out_ptr:nat64) (num_bytes:nat) : (va_quickCode unit | false | false | Vale.AES.X64.GCTR.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Gctr_bytes_extra_work
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
: (va_quickCode unit (va_code_Gctr_bytes_extra_work alg)) | [] | Vale.AES.X64.GCTR.va_quick_Gctr_bytes_extra_work | {
"file_name": "obj/Vale.AES.X64.GCTR.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
icb_BE: Vale.X64.Decls.quad32 ->
in_b: Vale.X64.Memory.buffer128 ->
out_b: Vale.X64.Memory.buffer128 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128 ->
orig_in_ptr: Vale.X64.Memory.nat64 ->
orig_out_ptr: Vale.X64.Memory.nat64 ->
num_bytes: Prims.nat
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCTR.va_code_Gctr_bytes_extra_work alg) | {
"end_col": 47,
"end_line": 229,
"start_col": 2,
"start_line": 225
} |
Prims.Tot | val va_quick_Gctr_bytes_no_extra
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
: (va_quickCode unit (va_code_Gctr_bytes_no_extra alg)) | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Gctr_bytes_no_extra (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) : (va_quickCode unit (va_code_Gctr_bytes_no_extra alg)) =
(va_QProc (va_code_Gctr_bytes_no_extra alg) ([]) (va_wp_Gctr_bytes_no_extra alg icb_BE in_b out_b
key round_keys keys_b orig_in_ptr orig_out_ptr num_bytes) (va_wpProof_Gctr_bytes_no_extra alg
icb_BE in_b out_b key round_keys keys_b orig_in_ptr orig_out_ptr num_bytes)) | val va_quick_Gctr_bytes_no_extra
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
: (va_quickCode unit (va_code_Gctr_bytes_no_extra alg))
let va_quick_Gctr_bytes_no_extra
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
: (va_quickCode unit (va_code_Gctr_bytes_no_extra alg)) = | false | null | false | (va_QProc (va_code_Gctr_bytes_no_extra alg)
([])
(va_wp_Gctr_bytes_no_extra alg icb_BE in_b out_b key round_keys keys_b orig_in_ptr orig_out_ptr
num_bytes)
(va_wpProof_Gctr_bytes_no_extra alg icb_BE in_b out_b key round_keys keys_b orig_in_ptr
orig_out_ptr num_bytes)) | {
"checked_file": "Vale.AES.X64.GCTR.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.AESCTRplain.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCTR.fsti"
} | [
"total"
] | [
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Decls.quad32",
"Vale.X64.Memory.buffer128",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Memory.nat64",
"Prims.nat",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCTR.va_code_Gctr_bytes_no_extra",
"Prims.Nil",
"Vale.X64.QuickCode.mod_t",
"Vale.AES.X64.GCTR.va_wp_Gctr_bytes_no_extra",
"Vale.AES.X64.GCTR.va_wpProof_Gctr_bytes_no_extra",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.AES.X64.GCTR
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open FStar.Seq
open Vale.AES.AES_s
open Vale.AES.X64.AES
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM_helpers
open Vale.Poly1305.Math
open Vale.Def.Words.Two_s
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.AESCTRplain
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 30"
//-- Inc32
val va_code_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_code
val va_codegen_success_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_pbool
val va_lemma_Inc32 : va_b0:va_code -> va_s0:va_state -> dst:va_operand_xmm -> one:va_operand_xmm
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Inc32 dst one) va_s0 /\ va_is_dst_xmm dst va_s0 /\
va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\ va_eval_xmm va_s0 one ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_xmm va_sM dst == Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 /\ va_state_eq va_sM
(va_update_flags va_sM (va_update_ok va_sM (va_update_operand_xmm dst va_sM va_s0)))))
[@ va_qattr]
let va_wp_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_is_dst_xmm dst va_s0 /\ va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\
va_eval_xmm va_s0 one == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0 /\ (forall
(va_x_dst:va_value_xmm) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl
(va_upd_operand_xmm dst va_x_dst va_s0) in va_get_ok va_sM /\ va_eval_xmm va_sM dst ==
Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 ==> va_k va_sM (())))
val va_wpProof_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> va_s0:va_state -> va_k:(va_state
-> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Inc32 dst one va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Inc32 dst one) ([va_Mod_flags;
va_mod_xmm dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) : (va_quickCode unit (va_code_Inc32
dst one)) =
(va_QProc (va_code_Inc32 dst one) ([va_Mod_flags; va_mod_xmm dst]) (va_wp_Inc32 dst one)
(va_wpProof_Inc32 dst one))
//--
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12
va_sM (va_update_flags va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_efl:Vale.X64.Flags.t)
(va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12 (va_upd_flags va_x_efl (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 va_s0)))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0,
va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b) (va_wpProof_Gctr_register alg
key round_keys keys_b))
//--
//-- Gctr_bytes_extra_work
val va_code_Gctr_bytes_extra_work : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_extra_work : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_bytes_extra_work : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->
icb_BE:quad32 -> in_b:buffer128 -> out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32)
-> keys_b:buffer128 -> orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_extra_work alg) va_s0 /\ va_get_ok va_s0 /\
(Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM
(va_update_xmm 4 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))
[@ va_qattr]
let va_wp_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr
in_b (Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks)) /\ (forall (va_x_mem:vale_heap)
(va_x_rdx:nat64) (va_x_r12:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm4:quad32) (va_x_heap1:vale_heap) (va_x_efl:Vale.X64.Flags.t) . let va_sM =
va_upd_flags va_x_efl (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRdx va_x_rdx (va_upd_mem va_x_mem va_s0)))))))) in va_get_ok va_sM /\ (let
num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Gctr_bytes_extra_work : alg:algorithm -> icb_BE:quad32 -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->
orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_bytes_extra_work alg)
([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 4; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128)
(out_b:buffer128) (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128)
(orig_in_ptr:nat64) (orig_out_ptr:nat64) (num_bytes:nat) : (va_quickCode unit
(va_code_Gctr_bytes_extra_work alg)) =
(va_QProc (va_code_Gctr_bytes_extra_work alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 4;
va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_mem])
(va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys keys_b orig_in_ptr
orig_out_ptr num_bytes) (va_wpProof_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes))
//--
//-- Gctr_bytes_no_extra
val va_code_Gctr_bytes_no_extra : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_no_extra : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_bytes_no_extra : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->
icb_BE:quad32 -> in_b:buffer128 -> out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32)
-> keys_b:buffer128 -> orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_no_extra alg) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg
key /\ FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\ (let num_blocks = num_bytes
`op_Division` 16 in num_bytes `op_Modulus` 16 == 0 /\ Vale.AES.GCTR.gctr_partial_def alg
num_blocks (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ (let plain = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0
va_sM) in_b)) 0 num_bytes in let cipher = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1
va_sM) out_b)) 0 num_bytes in cipher == Vale.AES.GCTR_s.gctr_encrypt_LE icb_BE
(Vale.AES.GCTR.make_gctr_plain_LE plain) alg key) /\ va_state_eq va_sM (va_update_ok va_sM
va_s0)))
[@ va_qattr]
let va_wp_Gctr_bytes_no_extra (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg
key /\ FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\ (let num_blocks = num_bytes
`op_Division` 16 in num_bytes `op_Modulus` 16 == 0 /\ Vale.AES.GCTR.gctr_partial_def alg
num_blocks (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE) /\ (let va_sM
= va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1
va_s0) (va_get_mem_heaplet 1 va_sM) /\ (let plain = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0
va_sM) in_b)) 0 num_bytes in let cipher = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1
va_sM) out_b)) 0 num_bytes in cipher == Vale.AES.GCTR_s.gctr_encrypt_LE icb_BE
(Vale.AES.GCTR.make_gctr_plain_LE plain) alg key) ==> va_k va_sM (())))
val va_wpProof_Gctr_bytes_no_extra : alg:algorithm -> icb_BE:quad32 -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->
orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_bytes_no_extra alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_bytes_no_extra alg) ([]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_bytes_no_extra (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64) | false | false | Vale.AES.X64.GCTR.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Gctr_bytes_no_extra
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
: (va_quickCode unit (va_code_Gctr_bytes_no_extra alg)) | [] | Vale.AES.X64.GCTR.va_quick_Gctr_bytes_no_extra | {
"file_name": "obj/Vale.AES.X64.GCTR.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
icb_BE: Vale.X64.Decls.quad32 ->
in_b: Vale.X64.Memory.buffer128 ->
out_b: Vale.X64.Memory.buffer128 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128 ->
orig_in_ptr: Vale.X64.Memory.nat64 ->
orig_out_ptr: Vale.X64.Memory.nat64 ->
num_bytes: Prims.nat
-> Vale.X64.QuickCode.va_quickCode Prims.unit (Vale.AES.X64.GCTR.va_code_Gctr_bytes_no_extra alg) | {
"end_col": 80,
"end_line": 310,
"start_col": 2,
"start_line": 308
} |
Prims.Tot | val va_wp_Gctr_bytes_no_extra
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Gctr_bytes_no_extra (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg
key /\ FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\ (let num_blocks = num_bytes
`op_Division` 16 in num_bytes `op_Modulus` 16 == 0 /\ Vale.AES.GCTR.gctr_partial_def alg
num_blocks (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE) /\ (let va_sM
= va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1
va_s0) (va_get_mem_heaplet 1 va_sM) /\ (let plain = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0
va_sM) in_b)) 0 num_bytes in let cipher = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1
va_sM) out_b)) 0 num_bytes in cipher == Vale.AES.GCTR_s.gctr_encrypt_LE icb_BE
(Vale.AES.GCTR.make_gctr_plain_LE plain) alg key) ==> va_k va_sM (()))) | val va_wp_Gctr_bytes_no_extra
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Gctr_bytes_no_extra
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
orig_in_ptr
in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes)
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0)
orig_out_ptr
out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes)
(va_get_mem_layout va_s0)
Secret /\
orig_in_ptr + 16 `op_Multiply` (Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` (Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b ==
Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
((Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b) `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\
FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\
(let num_blocks = num_bytes `op_Division` 16 in
num_bytes `op_Modulus` 16 == 0 /\
Vale.AES.GCTR.gctr_partial_def alg
num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b)
key
icb_BE) /\
(let va_sM = va_s0 in
va_get_ok va_sM /\
Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\
(let plain =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet
0
va_sM)
in_b))
0
num_bytes
in
let cipher =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet
1
va_sM)
out_b))
0
num_bytes
in
cipher ==
Vale.AES.GCTR_s.gctr_encrypt_LE icb_BE (Vale.AES.GCTR.make_gctr_plain_LE plain) alg key) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.AES.X64.GCTR.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.AESCTRplain.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCTR.fsti"
} | [
"total"
] | [
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Decls.quad32",
"Vale.X64.Memory.buffer128",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Memory.nat64",
"Prims.nat",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.Decls.validSrcAddrs128",
"Vale.X64.Decls.va_get_mem_heaplet",
"Vale.AES.GCM_helpers.bytes_to_quad_size",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validDstAddrs128",
"Prims.op_LessThan",
"Prims.op_Addition",
"Prims.op_Multiply",
"Vale.X64.Machine_s.pow2_64",
"Prims.eq2",
"Vale.X64.Decls.buffer_length",
"Vale.X64.Memory.vuint128",
"Prims.int",
"Vale.X64.Machine_s.pow2_32",
"Prims.l_or",
"Prims.op_Equality",
"Vale.AES.AES_common_s.AES_128",
"Vale.AES.AES_common_s.AES_256",
"Vale.AES.AES_s.is_aes_key_LE",
"FStar.Seq.Base.length",
"Vale.AES.AES_common_s.nr",
"Vale.Def.Types_s.quad32",
"Vale.AES.AES_s.key_to_round_keys_LE",
"Prims.op_Modulus",
"Vale.AES.GCTR.gctr_partial_def",
"Vale.X64.Decls.buffer128_as_seq",
"Prims.op_Division",
"Prims.l_imp",
"Vale.X64.Decls.modifies_buffer128",
"Vale.Def.Types_s.nat8",
"Vale.AES.GCTR_s.gctr_encrypt_LE",
"Vale.AES.GCTR.make_gctr_plain_LE",
"Vale.Def.Words_s.nat8",
"FStar.Seq.Base.slice",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"Vale.X64.State.vale_state"
] | [] | module Vale.AES.X64.GCTR
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open FStar.Seq
open Vale.AES.AES_s
open Vale.AES.X64.AES
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM_helpers
open Vale.Poly1305.Math
open Vale.Def.Words.Two_s
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.AESCTRplain
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 30"
//-- Inc32
val va_code_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_code
val va_codegen_success_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_pbool
val va_lemma_Inc32 : va_b0:va_code -> va_s0:va_state -> dst:va_operand_xmm -> one:va_operand_xmm
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Inc32 dst one) va_s0 /\ va_is_dst_xmm dst va_s0 /\
va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\ va_eval_xmm va_s0 one ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_xmm va_sM dst == Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 /\ va_state_eq va_sM
(va_update_flags va_sM (va_update_ok va_sM (va_update_operand_xmm dst va_sM va_s0)))))
[@ va_qattr]
let va_wp_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_is_dst_xmm dst va_s0 /\ va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\
va_eval_xmm va_s0 one == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0 /\ (forall
(va_x_dst:va_value_xmm) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl
(va_upd_operand_xmm dst va_x_dst va_s0) in va_get_ok va_sM /\ va_eval_xmm va_sM dst ==
Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 ==> va_k va_sM (())))
val va_wpProof_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> va_s0:va_state -> va_k:(va_state
-> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Inc32 dst one va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Inc32 dst one) ([va_Mod_flags;
va_mod_xmm dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) : (va_quickCode unit (va_code_Inc32
dst one)) =
(va_QProc (va_code_Inc32 dst one) ([va_Mod_flags; va_mod_xmm dst]) (va_wp_Inc32 dst one)
(va_wpProof_Inc32 dst one))
//--
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12
va_sM (va_update_flags va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_efl:Vale.X64.Flags.t)
(va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12 (va_upd_flags va_x_efl (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 va_s0)))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0,
va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b) (va_wpProof_Gctr_register alg
key round_keys keys_b))
//--
//-- Gctr_bytes_extra_work
val va_code_Gctr_bytes_extra_work : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_extra_work : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_bytes_extra_work : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->
icb_BE:quad32 -> in_b:buffer128 -> out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32)
-> keys_b:buffer128 -> orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_extra_work alg) va_s0 /\ va_get_ok va_s0 /\
(Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM
(va_update_xmm 4 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))
[@ va_qattr]
let va_wp_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr
in_b (Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks)) /\ (forall (va_x_mem:vale_heap)
(va_x_rdx:nat64) (va_x_r12:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm4:quad32) (va_x_heap1:vale_heap) (va_x_efl:Vale.X64.Flags.t) . let va_sM =
va_upd_flags va_x_efl (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRdx va_x_rdx (va_upd_mem va_x_mem va_s0)))))))) in va_get_ok va_sM /\ (let
num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Gctr_bytes_extra_work : alg:algorithm -> icb_BE:quad32 -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->
orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_bytes_extra_work alg)
([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 4; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128)
(out_b:buffer128) (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128)
(orig_in_ptr:nat64) (orig_out_ptr:nat64) (num_bytes:nat) : (va_quickCode unit
(va_code_Gctr_bytes_extra_work alg)) =
(va_QProc (va_code_Gctr_bytes_extra_work alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 4;
va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_mem])
(va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys keys_b orig_in_ptr
orig_out_ptr num_bytes) (va_wpProof_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes))
//--
//-- Gctr_bytes_no_extra
val va_code_Gctr_bytes_no_extra : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_no_extra : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_bytes_no_extra : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->
icb_BE:quad32 -> in_b:buffer128 -> out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32)
-> keys_b:buffer128 -> orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_no_extra alg) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg
key /\ FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\ (let num_blocks = num_bytes
`op_Division` 16 in num_bytes `op_Modulus` 16 == 0 /\ Vale.AES.GCTR.gctr_partial_def alg
num_blocks (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ (let plain = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0
va_sM) in_b)) 0 num_bytes in let cipher = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1
va_sM) out_b)) 0 num_bytes in cipher == Vale.AES.GCTR_s.gctr_encrypt_LE icb_BE
(Vale.AES.GCTR.make_gctr_plain_LE plain) alg key) /\ va_state_eq va_sM (va_update_ok va_sM
va_s0)))
[@ va_qattr]
let va_wp_Gctr_bytes_no_extra (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64) | false | true | Vale.AES.X64.GCTR.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Gctr_bytes_no_extra
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.AES.X64.GCTR.va_wp_Gctr_bytes_no_extra | {
"file_name": "obj/Vale.AES.X64.GCTR.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
icb_BE: Vale.X64.Decls.quad32 ->
in_b: Vale.X64.Memory.buffer128 ->
out_b: Vale.X64.Memory.buffer128 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128 ->
orig_in_ptr: Vale.X64.Memory.nat64 ->
orig_out_ptr: Vale.X64.Memory.nat64 ->
num_bytes: Prims.nat ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 75,
"end_line": 293,
"start_col": 2,
"start_line": 271
} |
Prims.Tot | val va_wp_Gctr_bytes_extra_work
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr
in_b (Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks)) /\ (forall (va_x_mem:vale_heap)
(va_x_rdx:nat64) (va_x_r12:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm4:quad32) (va_x_heap1:vale_heap) (va_x_efl:Vale.X64.Flags.t) . let va_sM =
va_upd_flags va_x_efl (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRdx va_x_rdx (va_upd_mem va_x_mem va_s0)))))))) in va_get_ok va_sM /\ (let
num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) ==> va_k va_sM (()))) | val va_wp_Gctr_bytes_extra_work
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Gctr_bytes_extra_work
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
orig_in_ptr
in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes)
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0)
orig_out_ptr
out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes)
(va_get_mem_layout va_s0)
Secret /\
orig_in_ptr + 16 `op_Multiply` (Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` (Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b ==
Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
((Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b) `op_Multiply` 16 < pow2_32)
)
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\
FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\
va_get_reg64 rR8 va_s0 ==
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem_heaplet 0 va_s0) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0)
keys_b
(Vale.AES.AES_common_s.nr alg + 1)
(va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in
num_bytes `op_Modulus` 16 =!= 0 /\
va_get_reg64 rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\
va_get_reg64 rR10 va_s0 == orig_out_ptr + 16 `op_Multiply` num_blocks /\
Vale.AES.GCTR.gctr_partial_def alg
num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b)
key
icb_BE /\ va_get_xmm 7 va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks)) /\
(forall (va_x_mem: vale_heap)
(va_x_rdx: nat64)
(va_x_r12: nat64)
(va_x_xmm0: quad32)
(va_x_xmm1: quad32)
(va_x_xmm2: quad32)
(va_x_xmm4: quad32)
(va_x_heap1: vale_heap)
(va_x_efl: Vale.X64.Flags.t).
let va_sM =
va_upd_flags va_x_efl
(va_upd_mem_heaplet 1
va_x_heap1
(va_upd_xmm 4
va_x_xmm4
(va_upd_xmm 2
va_x_xmm2
(va_upd_xmm 1
va_x_xmm1
(va_upd_xmm 0
va_x_xmm0
(va_upd_reg64 rR12
va_x_r12
(va_upd_reg64 rRdx va_x_rdx (va_upd_mem va_x_mem va_s0))))))))
in
va_get_ok va_sM /\
(let num_blocks = num_bytes `op_Division` 16 in
Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\
FStar.Seq.Base.slice #Vale.X64.Decls.quad32
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b)
0
num_blocks ==
FStar.Seq.Base.slice #Vale.X64.Decls.quad32
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b)
0
num_blocks /\
Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1 va_sM) ==
Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM))
alg
key
num_blocks /\
va_get_xmm 1 va_sM ==
Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1 va_sM)) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.AES.X64.GCTR.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.AESCTRplain.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCTR.fsti"
} | [
"total"
] | [
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Decls.quad32",
"Vale.X64.Memory.buffer128",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Memory.nat64",
"Prims.nat",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.Decls.validSrcAddrs128",
"Vale.X64.Decls.va_get_mem_heaplet",
"Vale.AES.GCM_helpers.bytes_to_quad_size",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validDstAddrs128",
"Prims.op_LessThan",
"Prims.op_Addition",
"Prims.op_Multiply",
"Vale.X64.Machine_s.pow2_64",
"Prims.eq2",
"Vale.X64.Decls.buffer_length",
"Vale.X64.Memory.vuint128",
"Prims.int",
"Vale.X64.Machine_s.pow2_32",
"Vale.X64.CPU_Features_s.aesni_enabled",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.l_or",
"Prims.op_Equality",
"Vale.AES.AES_common_s.AES_128",
"Vale.AES.AES_common_s.AES_256",
"Vale.AES.AES_s.is_aes_key_LE",
"FStar.Seq.Base.length",
"Vale.AES.AES_common_s.nr",
"Vale.AES.AES_s.key_to_round_keys_LE",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Memory.buffer_addr",
"Vale.X64.Decls.buffer128_as_seq",
"Prims.l_not",
"Prims.op_Modulus",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR10",
"Vale.AES.GCTR.gctr_partial_def",
"Vale.Def.Types_s.quad32",
"Vale.X64.Decls.va_get_xmm",
"Vale.AES.GCTR_s.inc32",
"Prims.op_Division",
"Prims.l_Forall",
"Vale.X64.InsBasic.vale_heap",
"Vale.X64.Flags.t",
"Prims.l_imp",
"Vale.X64.Decls.modifies_buffer128",
"FStar.Seq.Base.slice",
"Vale.X64.Decls.buffer128_read",
"Vale.AES.GCTR_s.gctr_encrypt_block",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_mem_heaplet",
"Vale.X64.Decls.va_upd_xmm",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Decls.va_upd_mem"
] | [] | module Vale.AES.X64.GCTR
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open FStar.Seq
open Vale.AES.AES_s
open Vale.AES.X64.AES
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM_helpers
open Vale.Poly1305.Math
open Vale.Def.Words.Two_s
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.AESCTRplain
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 30"
//-- Inc32
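// Each procedure interface in this file follows the Vale quick-code pattern:
// va_code_* constructs the procedure's code, va_codegen_success_* records whether
// code generation succeeds, va_lemma_* states the Hoare-style contract, va_wp_* is
// the matching weakest-precondition predicate, and va_quick_* bundles them for the
// QuickCodes framework.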
val va_code_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_code
val va_codegen_success_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_pbool
val va_lemma_Inc32 : va_b0:va_code -> va_s0:va_state -> dst:va_operand_xmm -> one:va_operand_xmm
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Inc32 dst one) va_s0 /\ va_is_dst_xmm dst va_s0 /\
va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\ va_eval_xmm va_s0 one ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_xmm va_sM dst == Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 /\ va_state_eq va_sM
(va_update_flags va_sM (va_update_ok va_sM (va_update_operand_xmm dst va_sM va_s0)))))
[@ va_qattr]
let va_wp_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_is_dst_xmm dst va_s0 /\ va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\
va_eval_xmm va_s0 one == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0 /\ (forall
(va_x_dst:va_value_xmm) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl
(va_upd_operand_xmm dst va_x_dst va_s0) in va_get_ok va_sM /\ va_eval_xmm va_sM dst ==
Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 ==> va_k va_sM (())))
val va_wpProof_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> va_s0:va_state -> va_k:(va_state
-> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Inc32 dst one va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Inc32 dst one) ([va_Mod_flags;
va_mod_xmm dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) : (va_quickCode unit (va_code_Inc32
dst one)) =
(va_QProc (va_code_Inc32 dst one) ([va_Mod_flags; va_mod_xmm dst]) (va_wp_Inc32 dst one)
(va_wpProof_Inc32 dst one))
//--
//-- Gctr_register
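// Encrypts the single block held in xmm1 under the counter block in xmm7; the
// contract below states the result as gctr_encrypt_block (xmm7) (old xmm1) alg key 0.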
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12
va_sM (va_update_flags va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_efl:Vale.X64.Flags.t)
(va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12 (va_upd_flags va_x_efl (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 va_s0)))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0,
va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b) (va_wpProof_Gctr_register alg
key round_keys keys_b))
//--
//-- Gctr_bytes_extra_work
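// Handles the trailing partial block of a GCTR encryption: the contract requires
// num_bytes % 16 <> 0 and gctr_partial for the first num_bytes / 16 blocks, and
// ensures that block index num_bytes / 16 of out_b becomes gctr_encrypt_block of
// the corresponding input block while the earlier blocks of out_b are unchanged.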
val va_code_Gctr_bytes_extra_work : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_extra_work : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_bytes_extra_work : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->
icb_BE:quad32 -> in_b:buffer128 -> out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32)
-> keys_b:buffer128 -> orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_extra_work alg) va_s0 /\ va_get_ok va_s0 /\
(Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM
(va_update_xmm 4 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))
[@ va_qattr]
let va_wp_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64) | false | true | Vale.AES.X64.GCTR.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Gctr_bytes_extra_work
(alg: algorithm)
(icb_BE: quad32)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(orig_in_ptr orig_out_ptr: nat64)
(num_bytes: nat)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.AES.X64.GCTR.va_wp_Gctr_bytes_extra_work | {
"file_name": "obj/Vale.AES.X64.GCTR.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
icb_BE: Vale.X64.Decls.quad32 ->
in_b: Vale.X64.Memory.buffer128 ->
out_b: Vale.X64.Memory.buffer128 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128 ->
orig_in_ptr: Vale.X64.Memory.nat64 ->
orig_out_ptr: Vale.X64.Memory.nat64 ->
num_bytes: Prims.nat ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 33,
"end_line": 208,
"start_col": 2,
"start_line": 172
} |
Prims.Tot | val va_req_Gctr_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
: prop | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_req_Gctr_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(in_b:buffer128) (num_bytes:nat64) (out_b:buffer128) (inout_b:buffer128) (keys_b:buffer128)
(ctr_b:buffer128) (num_blocks:nat64) (key:(seq nat32)) : prop =
(va_require_total va_b0 (va_code_Gctr_bytes_stdcall win alg) va_s0 /\ va_get_ok va_s0 /\ (let
(in_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (out_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (ctr_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (win
==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0))
/\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 16)
(va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ num_bytes == (if win then va_get_reg64 rRdx va_s0 else
va_get_reg64 rRsi va_s0) /\ num_blocks == (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0)) /\ Vale.X64.Decls.buffers_disjoint128 in_b
out_b /\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\ (Vale.X64.Decls.buffers_disjoint128
in_b keys_b \/ in_b == keys_b) /\ Vale.X64.Decls.buffer_disjoints128 ctr_b ([in_b; out_b;
keys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([in_b; out_b; keys_b; ctr_b]) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr in_b num_blocks (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out_ptr out_b num_blocks
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) ctr_ptr ctr_b 1 (va_get_mem_layout
va_s0) Secret /\ in_ptr + 16 `op_Multiply` num_blocks < pow2_64 /\ out_ptr + 16 `op_Multiply`
num_blocks < pow2_64 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
num_blocks /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 ctr_b == 1 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
inout_b == 1 /\ 256 `op_Multiply` Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b <
pow2_32 /\ 4096 `op_Multiply` num_blocks `op_Multiply` 16 < pow2_32 /\ (num_blocks
`op_Multiply` 128 `op_Division` 8 <= num_bytes /\ num_bytes < num_blocks `op_Multiply` 128
`op_Division` 8 + 128 `op_Division` 8) /\ (aesni_enabled /\ avx_enabled /\ sse_enabled) /\ (alg
= AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key)) | val va_req_Gctr_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
: prop
let va_req_Gctr_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
: prop = | false | null | false | (va_require_total va_b0 (va_code_Gctr_bytes_stdcall win alg) va_s0 /\ va_get_ok va_s0 /\
(let in_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0)
in
let out_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0)
in
let inout_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0)
in
let keys_ptr:(va_int_range 0 18446744073709551615) =
(if win
then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)
else va_get_reg64 rR8 va_s0)
in
let ctr_ptr:(va_int_range 0 18446744073709551615) =
(if win
then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
else va_get_reg64 rR9 va_s0)
in
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 16)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
num_bytes == (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) /\
num_blocks ==
(if win
then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0)) /\
Vale.X64.Decls.buffers_disjoint128 in_b out_b /\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\
(Vale.X64.Decls.buffers_disjoint128 in_b keys_b \/ in_b == keys_b) /\
Vale.X64.Decls.buffer_disjoints128 ctr_b ([in_b; out_b; keys_b]) /\
Vale.X64.Decls.buffer_disjoints128 inout_b ([in_b; out_b; keys_b; ctr_b]) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in_ptr
in_b
num_blocks
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out_ptr
out_b
num_blocks
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr
inout_b
1
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
keys_ptr
keys_b
(Vale.AES.AES_common_s.nr alg + 1)
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
ctr_ptr
ctr_b
1
(va_get_mem_layout va_s0)
Secret /\ in_ptr + 16 `op_Multiply` num_blocks < pow2_64 /\
out_ptr + 16 `op_Multiply` num_blocks < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b == num_blocks /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 ctr_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
256 `op_Multiply` (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b) < pow2_32 /\
(4096 `op_Multiply` num_blocks) `op_Multiply` 16 < pow2_32 /\
((num_blocks `op_Multiply` 128) `op_Division` 8 <= num_bytes /\
num_bytes < (num_blocks `op_Multiply` 128) `op_Division` 8 + 128 `op_Division` 8) /\
(aesni_enabled /\ avx_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key)) | {
"checked_file": "Vale.AES.X64.GCTR.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.AESCTRplain.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCTR.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Prims.bool",
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Prims.l_and",
"Vale.X64.Decls.va_require_total",
"Vale.AES.X64.GCTR.va_code_Gctr_bytes_stdcall",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Prims.l_imp",
"Vale.X64.Stack_i.valid_stack_slot64",
"Prims.op_Addition",
"Vale.Arch.HeapTypes_s.Public",
"Vale.X64.Decls.va_get_stackTaint",
"Prims.l_not",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Stack_i.load_stack64",
"Vale.X64.Decls.buffers_disjoint128",
"Prims.l_or",
"Vale.X64.Decls.buffer_disjoints128",
"Prims.Cons",
"Prims.Nil",
"Vale.X64.Decls.validSrcAddrs128",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validDstAddrs128",
"Vale.AES.AES_common_s.nr",
"Prims.op_LessThan",
"Prims.op_Multiply",
"Vale.X64.Machine_s.pow2_64",
"Prims.nat",
"Vale.X64.Decls.buffer_length",
"Vale.X64.Memory.vuint128",
"Prims.int",
"Vale.X64.Machine_s.pow2_32",
"Prims.op_LessThanOrEqual",
"Prims.op_Division",
"Vale.X64.CPU_Features_s.aesni_enabled",
"Vale.X64.CPU_Features_s.avx_enabled",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.op_Equality",
"Vale.AES.AES_common_s.AES_128",
"Vale.AES.AES_common_s.AES_256",
"Vale.AES.AES_s.is_aes_key_LE",
"Vale.Def.Types_s.quad32",
"Vale.X64.Decls.buffer128_as_seq",
"Vale.AES.AES_s.key_to_round_keys_LE",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRdi",
"Prims.prop"
] | [] | module Vale.AES.X64.GCTR
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open FStar.Seq
open Vale.AES.AES_s
open Vale.AES.X64.AES
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM_helpers
open Vale.Poly1305.Math
open Vale.Def.Words.Two_s
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.AESCTRplain
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 30"
//-- Inc32
val va_code_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_code
val va_codegen_success_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_pbool
val va_lemma_Inc32 : va_b0:va_code -> va_s0:va_state -> dst:va_operand_xmm -> one:va_operand_xmm
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Inc32 dst one) va_s0 /\ va_is_dst_xmm dst va_s0 /\
va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\ va_eval_xmm va_s0 one ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_xmm va_sM dst == Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 /\ va_state_eq va_sM
(va_update_flags va_sM (va_update_ok va_sM (va_update_operand_xmm dst va_sM va_s0)))))
[@ va_qattr]
let va_wp_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_is_dst_xmm dst va_s0 /\ va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\
va_eval_xmm va_s0 one == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0 /\ (forall
(va_x_dst:va_value_xmm) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl
(va_upd_operand_xmm dst va_x_dst va_s0) in va_get_ok va_sM /\ va_eval_xmm va_sM dst ==
Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 ==> va_k va_sM (())))
val va_wpProof_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> va_s0:va_state -> va_k:(va_state
-> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Inc32 dst one va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Inc32 dst one) ([va_Mod_flags;
va_mod_xmm dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) : (va_quickCode unit (va_code_Inc32
dst one)) =
(va_QProc (va_code_Inc32 dst one) ([va_Mod_flags; va_mod_xmm dst]) (va_wp_Inc32 dst one)
(va_wpProof_Inc32 dst one))
//--
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12
va_sM (va_update_flags va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_efl:Vale.X64.Flags.t)
(va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12 (va_upd_flags va_x_efl (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 va_s0)))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0,
va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b) (va_wpProof_Gctr_register alg
key round_keys keys_b))
//--
//-- Gctr_bytes_extra_work
val va_code_Gctr_bytes_extra_work : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_extra_work : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_bytes_extra_work : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->
icb_BE:quad32 -> in_b:buffer128 -> out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32)
-> keys_b:buffer128 -> orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_extra_work alg) va_s0 /\ va_get_ok va_s0 /\
(Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM
(va_update_xmm 4 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))
[@ va_qattr]
let va_wp_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr
in_b (Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks)) /\ (forall (va_x_mem:vale_heap)
(va_x_rdx:nat64) (va_x_r12:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm4:quad32) (va_x_heap1:vale_heap) (va_x_efl:Vale.X64.Flags.t) . let va_sM =
va_upd_flags va_x_efl (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRdx va_x_rdx (va_upd_mem va_x_mem va_s0)))))))) in va_get_ok va_sM /\ (let
num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Gctr_bytes_extra_work : alg:algorithm -> icb_BE:quad32 -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->
orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_bytes_extra_work alg)
([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 4; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128)
(out_b:buffer128) (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128)
(orig_in_ptr:nat64) (orig_out_ptr:nat64) (num_bytes:nat) : (va_quickCode unit
(va_code_Gctr_bytes_extra_work alg)) =
(va_QProc (va_code_Gctr_bytes_extra_work alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 4;
va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_mem])
(va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys keys_b orig_in_ptr
orig_out_ptr num_bytes) (va_wpProof_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes))
//--
//-- Gctr_bytes_no_extra
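// Covers the exact-multiple case (num_bytes % 16 == 0): the procedure leaves the
// state unchanged (note the empty frame in va_quick_Gctr_bytes_no_extra below) and
// its contract recasts the block-level gctr_partial fact about out_b as the
// byte-level gctr_encrypt_LE specification.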
val va_code_Gctr_bytes_no_extra : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_no_extra : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_bytes_no_extra : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->
icb_BE:quad32 -> in_b:buffer128 -> out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32)
-> keys_b:buffer128 -> orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_no_extra alg) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg
key /\ FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\ (let num_blocks = num_bytes
`op_Division` 16 in num_bytes `op_Modulus` 16 == 0 /\ Vale.AES.GCTR.gctr_partial_def alg
num_blocks (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ (let plain = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0
va_sM) in_b)) 0 num_bytes in let cipher = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1
va_sM) out_b)) 0 num_bytes in cipher == Vale.AES.GCTR_s.gctr_encrypt_LE icb_BE
(Vale.AES.GCTR.make_gctr_plain_LE plain) alg key) /\ va_state_eq va_sM (va_update_ok va_sM
va_s0)))
[@ va_qattr]
let va_wp_Gctr_bytes_no_extra (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg
key /\ FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\ (let num_blocks = num_bytes
`op_Division` 16 in num_bytes `op_Modulus` 16 == 0 /\ Vale.AES.GCTR.gctr_partial_def alg
num_blocks (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE) /\ (let va_sM
= va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1
va_s0) (va_get_mem_heaplet 1 va_sM) /\ (let plain = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0
va_sM) in_b)) 0 num_bytes in let cipher = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1
va_sM) out_b)) 0 num_bytes in cipher == Vale.AES.GCTR_s.gctr_encrypt_LE icb_BE
(Vale.AES.GCTR.make_gctr_plain_LE plain) alg key) ==> va_k va_sM (())))
val va_wpProof_Gctr_bytes_no_extra : alg:algorithm -> icb_BE:quad32 -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->
orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_bytes_no_extra alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_bytes_no_extra alg) ([]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_bytes_no_extra (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) : (va_quickCode unit (va_code_Gctr_bytes_no_extra alg)) =
(va_QProc (va_code_Gctr_bytes_no_extra alg) ([]) (va_wp_Gctr_bytes_no_extra alg icb_BE in_b out_b
key round_keys keys_b orig_in_ptr orig_out_ptr num_bytes) (va_wpProof_Gctr_bytes_no_extra alg
icb_BE in_b out_b key round_keys keys_b orig_in_ptr orig_out_ptr num_bytes))
//--
//-- Gctr_bytes_stdcall
val va_code_Gctr_bytes_stdcall : win:bool -> alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_stdcall : win:bool -> alg:algorithm -> Tot va_pbool
let va_req_Gctr_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(in_b:buffer128) (num_bytes:nat64) (out_b:buffer128) (inout_b:buffer128) (keys_b:buffer128) | false | true | Vale.AES.X64.GCTR.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_req_Gctr_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
: prop | [] | Vale.AES.X64.GCTR.va_req_Gctr_bytes_stdcall | {
"file_name": "obj/Vale.AES.X64.GCTR.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
win: Prims.bool ->
alg: Vale.AES.AES_common_s.algorithm ->
in_b: Vale.X64.Memory.buffer128 ->
num_bytes: Vale.X64.Memory.nat64 ->
out_b: Vale.X64.Memory.buffer128 ->
inout_b: Vale.X64.Memory.buffer128 ->
keys_b: Vale.X64.Memory.buffer128 ->
ctr_b: Vale.X64.Memory.buffer128 ->
num_blocks: Vale.X64.Memory.nat64 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32
-> Prims.prop | {
"end_col": 49,
"end_line": 361,
"start_col": 2,
"start_line": 320
} |
Prims.Tot | val va_quick_Gctr_bytes_stdcall
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
: (va_quickCode unit (va_code_Gctr_bytes_stdcall win alg)) | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Gctr_bytes_stdcall (win:bool) (alg:algorithm) (in_b:buffer128) (num_bytes:nat64)
(out_b:buffer128) (inout_b:buffer128) (keys_b:buffer128) (ctr_b:buffer128) (num_blocks:nat64)
(key:(seq nat32)) : (va_quickCode unit (va_code_Gctr_bytes_stdcall win alg)) =
(va_QProc (va_code_Gctr_bytes_stdcall win alg) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_flags;
va_Mod_mem_layout; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14;
va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp;
va_Mod_reg64 rRsp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Gctr_bytes_stdcall win alg in_b
num_bytes out_b inout_b keys_b ctr_b num_blocks key) (va_wpProof_Gctr_bytes_stdcall win alg
in_b num_bytes out_b inout_b keys_b ctr_b num_blocks key)) | val va_quick_Gctr_bytes_stdcall
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
: (va_quickCode unit (va_code_Gctr_bytes_stdcall win alg))
let va_quick_Gctr_bytes_stdcall
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
: (va_quickCode unit (va_code_Gctr_bytes_stdcall win alg)) = | false | null | false | (va_QProc (va_code_Gctr_bytes_stdcall win alg)
([
va_Mod_stackTaint; va_Mod_stack; va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 2;
va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12;
va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp;
va_Mod_reg64 rRsp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx;
va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem
])
(va_wp_Gctr_bytes_stdcall win alg in_b num_bytes out_b inout_b keys_b ctr_b num_blocks key)
(va_wpProof_Gctr_bytes_stdcall win alg in_b num_bytes out_b inout_b keys_b ctr_b num_blocks key)
) | {
"checked_file": "Vale.AES.X64.GCTR.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.AESCTRplain.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCTR.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCTR.va_code_Gctr_bytes_stdcall",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_stackTaint",
"Vale.X64.QuickCode.va_Mod_stack",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_mem_layout",
"Vale.X64.QuickCode.va_Mod_mem_heaplet",
"Vale.X64.QuickCode.va_Mod_xmm",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRax",
"Vale.X64.QuickCode.va_Mod_mem",
"Prims.Nil",
"Vale.AES.X64.GCTR.va_wp_Gctr_bytes_stdcall",
"Vale.AES.X64.GCTR.va_wpProof_Gctr_bytes_stdcall",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.AES.X64.GCTR
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open FStar.Seq
open Vale.AES.AES_s
open Vale.AES.X64.AES
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM_helpers
open Vale.Poly1305.Math
open Vale.Def.Words.Two_s
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.AESCTRplain
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 30"
//-- Inc32
val va_code_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_code
val va_codegen_success_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_pbool
val va_lemma_Inc32 : va_b0:va_code -> va_s0:va_state -> dst:va_operand_xmm -> one:va_operand_xmm
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Inc32 dst one) va_s0 /\ va_is_dst_xmm dst va_s0 /\
va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\ va_eval_xmm va_s0 one ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_xmm va_sM dst == Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 /\ va_state_eq va_sM
(va_update_flags va_sM (va_update_ok va_sM (va_update_operand_xmm dst va_sM va_s0)))))
[@ va_qattr]
let va_wp_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_is_dst_xmm dst va_s0 /\ va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\
va_eval_xmm va_s0 one == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0 /\ (forall
(va_x_dst:va_value_xmm) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl
(va_upd_operand_xmm dst va_x_dst va_s0) in va_get_ok va_sM /\ va_eval_xmm va_sM dst ==
Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 ==> va_k va_sM (())))
val va_wpProof_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> va_s0:va_state -> va_k:(va_state
-> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Inc32 dst one va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Inc32 dst one) ([va_Mod_flags;
va_mod_xmm dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) : (va_quickCode unit (va_code_Inc32
dst one)) =
(va_QProc (va_code_Inc32 dst one) ([va_Mod_flags; va_mod_xmm dst]) (va_wp_Inc32 dst one)
(va_wpProof_Inc32 dst one))
//--
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12
va_sM (va_update_flags va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_efl:Vale.X64.Flags.t)
(va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12 (va_upd_flags va_x_efl (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 va_s0)))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0,
va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b) (va_wpProof_Gctr_register alg
key round_keys keys_b))
//--
//-- Gctr_bytes_extra_work
val va_code_Gctr_bytes_extra_work : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_extra_work : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_bytes_extra_work : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->
icb_BE:quad32 -> in_b:buffer128 -> out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32)
-> keys_b:buffer128 -> orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_extra_work alg) va_s0 /\ va_get_ok va_s0 /\
(Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM
(va_update_xmm 4 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))
[@ va_qattr]
let va_wp_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr
in_b (Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks)) /\ (forall (va_x_mem:vale_heap)
(va_x_rdx:nat64) (va_x_r12:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm4:quad32) (va_x_heap1:vale_heap) (va_x_efl:Vale.X64.Flags.t) . let va_sM =
va_upd_flags va_x_efl (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRdx va_x_rdx (va_upd_mem va_x_mem va_s0)))))))) in va_get_ok va_sM /\ (let
num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Gctr_bytes_extra_work : alg:algorithm -> icb_BE:quad32 -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->
orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_bytes_extra_work alg)
([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 4; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128)
(out_b:buffer128) (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128)
(orig_in_ptr:nat64) (orig_out_ptr:nat64) (num_bytes:nat) : (va_quickCode unit
(va_code_Gctr_bytes_extra_work alg)) =
(va_QProc (va_code_Gctr_bytes_extra_work alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 4;
va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_mem])
(va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys keys_b orig_in_ptr
orig_out_ptr num_bytes) (va_wpProof_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes))
//--
//-- Gctr_bytes_no_extra
val va_code_Gctr_bytes_no_extra : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_no_extra : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_bytes_no_extra : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->
icb_BE:quad32 -> in_b:buffer128 -> out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32)
-> keys_b:buffer128 -> orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_no_extra alg) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg
key /\ FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\ (let num_blocks = num_bytes
`op_Division` 16 in num_bytes `op_Modulus` 16 == 0 /\ Vale.AES.GCTR.gctr_partial_def alg
num_blocks (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ (let plain = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0
va_sM) in_b)) 0 num_bytes in let cipher = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1
va_sM) out_b)) 0 num_bytes in cipher == Vale.AES.GCTR_s.gctr_encrypt_LE icb_BE
(Vale.AES.GCTR.make_gctr_plain_LE plain) alg key) /\ va_state_eq va_sM (va_update_ok va_sM
va_s0)))
[@ va_qattr]
let va_wp_Gctr_bytes_no_extra (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg
key /\ FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\ (let num_blocks = num_bytes
`op_Division` 16 in num_bytes `op_Modulus` 16 == 0 /\ Vale.AES.GCTR.gctr_partial_def alg
num_blocks (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE) /\ (let va_sM
= va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1
va_s0) (va_get_mem_heaplet 1 va_sM) /\ (let plain = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0
va_sM) in_b)) 0 num_bytes in let cipher = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1
va_sM) out_b)) 0 num_bytes in cipher == Vale.AES.GCTR_s.gctr_encrypt_LE icb_BE
(Vale.AES.GCTR.make_gctr_plain_LE plain) alg key) ==> va_k va_sM (())))
val va_wpProof_Gctr_bytes_no_extra : alg:algorithm -> icb_BE:quad32 -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->
orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_bytes_no_extra alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_bytes_no_extra alg) ([]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_bytes_no_extra (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) : (va_quickCode unit (va_code_Gctr_bytes_no_extra alg)) =
(va_QProc (va_code_Gctr_bytes_no_extra alg) ([]) (va_wp_Gctr_bytes_no_extra alg icb_BE in_b out_b
key round_keys keys_b orig_in_ptr orig_out_ptr num_bytes) (va_wpProof_Gctr_bytes_no_extra alg
icb_BE in_b out_b key round_keys keys_b orig_in_ptr orig_out_ptr num_bytes))
//--
//-- Gctr_bytes_stdcall
val va_code_Gctr_bytes_stdcall : win:bool -> alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_stdcall : win:bool -> alg:algorithm -> Tot va_pbool
let va_req_Gctr_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(in_b:buffer128) (num_bytes:nat64) (out_b:buffer128) (inout_b:buffer128) (keys_b:buffer128)
(ctr_b:buffer128) (num_blocks:nat64) (key:(seq nat32)) : prop =
(va_require_total va_b0 (va_code_Gctr_bytes_stdcall win alg) va_s0 /\ va_get_ok va_s0 /\ (let
(in_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (out_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (ctr_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (win
==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0))
/\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 16)
(va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ num_bytes == (if win then va_get_reg64 rRdx va_s0 else
va_get_reg64 rRsi va_s0) /\ num_blocks == (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0)) /\ Vale.X64.Decls.buffers_disjoint128 in_b
out_b /\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\ (Vale.X64.Decls.buffers_disjoint128
in_b keys_b \/ in_b == keys_b) /\ Vale.X64.Decls.buffer_disjoints128 ctr_b ([in_b; out_b;
keys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([in_b; out_b; keys_b; ctr_b]) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr in_b num_blocks (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out_ptr out_b num_blocks
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) ctr_ptr ctr_b 1 (va_get_mem_layout
va_s0) Secret /\ in_ptr + 16 `op_Multiply` num_blocks < pow2_64 /\ out_ptr + 16 `op_Multiply`
num_blocks < pow2_64 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
num_blocks /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 ctr_b == 1 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
inout_b == 1 /\ 256 `op_Multiply` Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b <
pow2_32 /\ 4096 `op_Multiply` num_blocks `op_Multiply` 16 < pow2_32 /\ (num_blocks
`op_Multiply` 128 `op_Division` 8 <= num_bytes /\ num_bytes < num_blocks `op_Multiply` 128
`op_Division` 8 + 128 `op_Division` 8) /\ (aesni_enabled /\ avx_enabled /\ sse_enabled) /\ (alg
= AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key))
let va_ens_Gctr_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(in_b:buffer128) (num_bytes:nat64) (out_b:buffer128) (inout_b:buffer128) (keys_b:buffer128)
(ctr_b:buffer128) (num_blocks:nat64) (key:(seq nat32)) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Gctr_bytes_stdcall va_b0 va_s0 win alg in_b num_bytes out_b inout_b keys_b ctr_b
num_blocks key /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let
(in_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (out_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (ctr_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in Vale.X64.Decls.modifies_buffer128_2 out_b inout_b (va_get_mem va_s0)
(va_get_mem va_sM) /\ (let plain_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) inout_b)
in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_quads) 0 num_bytes in let cipher_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out_b)
(Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let cipher_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_quads) 0 num_bytes in
cipher_bytes == Vale.AES.GCTR_s.gctr_encrypt_LE (Vale.X64.Decls.buffer128_read ctr_b 0
(va_get_mem va_s0)) (Vale.AES.GCTR.make_gctr_plain_LE plain_bytes) alg key /\ va_get_reg64 rRsp
va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_layout va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM (va_update_xmm 12 va_sM
(va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM
(va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM
va_s0)))))))))))))))))))))))))))))))))))))))))
val va_lemma_Gctr_bytes_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> alg:algorithm ->
in_b:buffer128 -> num_bytes:nat64 -> out_b:buffer128 -> inout_b:buffer128 -> keys_b:buffer128 ->
ctr_b:buffer128 -> num_blocks:nat64 -> key:(seq nat32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_stdcall win alg) va_s0 /\ va_get_ok va_s0
/\ (let (in_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (out_ptr:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (ctr_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (win
==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0))
/\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 16)
(va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ num_bytes == (if win then va_get_reg64 rRdx va_s0 else
va_get_reg64 rRsi va_s0) /\ num_blocks == (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0)) /\ Vale.X64.Decls.buffers_disjoint128 in_b
out_b /\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\ (Vale.X64.Decls.buffers_disjoint128
in_b keys_b \/ in_b == keys_b) /\ Vale.X64.Decls.buffer_disjoints128 ctr_b ([in_b; out_b;
keys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([in_b; out_b; keys_b; ctr_b]) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr in_b num_blocks (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out_ptr out_b num_blocks
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) ctr_ptr ctr_b 1 (va_get_mem_layout
va_s0) Secret /\ in_ptr + 16 `op_Multiply` num_blocks < pow2_64 /\ out_ptr + 16 `op_Multiply`
num_blocks < pow2_64 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
num_blocks /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 ctr_b == 1 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
inout_b == 1 /\ 256 `op_Multiply` Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b <
pow2_32 /\ 4096 `op_Multiply` num_blocks `op_Multiply` 16 < pow2_32 /\ (num_blocks
`op_Multiply` 128 `op_Division` 8 <= num_bytes /\ num_bytes < num_blocks `op_Multiply` 128
`op_Division` 8 + 128 `op_Division` 8) /\ (aesni_enabled /\ avx_enabled /\ sse_enabled) /\ (alg
= AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (in_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (out_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (ctr_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in Vale.X64.Decls.modifies_buffer128_2 out_b inout_b (va_get_mem va_s0)
(va_get_mem va_sM) /\ (let plain_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) inout_b)
in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_quads) 0 num_bytes in let cipher_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out_b)
(Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let cipher_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_quads) 0 num_bytes in
cipher_bytes == Vale.AES.GCTR_s.gctr_encrypt_LE (Vale.X64.Decls.buffer128_read ctr_b 0
(va_get_mem va_s0)) (Vale.AES.GCTR.make_gctr_plain_LE plain_bytes) alg key /\ va_get_reg64 rRsp
va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_layout va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM (va_update_xmm 12 va_sM
(va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM
(va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM
va_s0))))))))))))))))))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_bytes_stdcall (win:bool) (alg:algorithm) (in_b:buffer128) (num_bytes:nat64)
(out_b:buffer128) (inout_b:buffer128) (keys_b:buffer128) (ctr_b:buffer128) (num_blocks:nat64)
(key:(seq nat32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (in_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (out_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64
rRdx va_s0) in let (inout_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0) in let (keys_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun _ -> va_get_reg64 rR8 va_s0) in let
(ctr_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)) (fun
_ -> va_get_reg64 rR9 va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem
va_s0) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0)
(va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64
rRsp va_s0 + 32 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ num_bytes == va_if win (fun _ -> va_get_reg64 rRdx va_s0)
(fun _ -> va_get_reg64 rRsi va_s0) /\ num_blocks == va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 16) (va_get_stack va_s0))
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0)) /\
Vale.X64.Decls.buffers_disjoint128 in_b out_b /\ Vale.X64.Decls.buffers_disjoint128 keys_b
out_b /\ (Vale.X64.Decls.buffers_disjoint128 in_b keys_b \/ in_b == keys_b) /\
Vale.X64.Decls.buffer_disjoints128 ctr_b ([in_b; out_b; keys_b]) /\
Vale.X64.Decls.buffer_disjoints128 inout_b ([in_b; out_b; keys_b; ctr_b]) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr in_b num_blocks (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out_ptr out_b num_blocks
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) ctr_ptr ctr_b 1 (va_get_mem_layout
va_s0) Secret /\ in_ptr + 16 `op_Multiply` num_blocks < pow2_64 /\ out_ptr + 16 `op_Multiply`
num_blocks < pow2_64 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
num_blocks /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 ctr_b == 1 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
inout_b == 1 /\ 256 `op_Multiply` Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b <
pow2_32 /\ 4096 `op_Multiply` num_blocks `op_Multiply` 16 < pow2_32 /\ (num_blocks
`op_Multiply` 128 `op_Division` 8 <= num_bytes /\ num_bytes < num_blocks `op_Multiply` 128
`op_Division` 8 + 128 `op_Division` 8) /\ (aesni_enabled /\ avx_enabled /\ sse_enabled) /\ (alg
= AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64)
(va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64) (va_x_rsi:nat64)
(va_x_rsp:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64)
(va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_heap1:vale_heap) (va_x_heap2:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_flags va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 2
va_x_heap2 (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14
(va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10
va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm
6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm
2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR15 va_x_r15
(va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64
rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRsi va_x_rsi
(va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\ (let (in_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64
rRdi va_s0) in let (out_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64
rRcx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun
_ -> va_get_reg64 rR8 va_s0) in let (ctr_ptr:(va_int_range 0 18446744073709551615)) = va_if win
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack
va_s0)) (fun _ -> va_get_reg64 rR9 va_s0) in Vale.X64.Decls.modifies_buffer128_2 out_b inout_b
(va_get_mem va_s0) (va_get_mem va_sM) /\ (let plain_quads = FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_quads) 0 num_bytes in let cipher_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out_b)
(Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let cipher_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_quads) 0 num_bytes in
cipher_bytes == Vale.AES.GCTR_s.gctr_encrypt_LE (Vale.X64.Decls.buffer128_read ctr_b 0
(va_get_mem va_s0)) (Vale.AES.GCTR.make_gctr_plain_LE plain_bytes) alg key /\ va_get_reg64 rRsp
va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) ==>
va_k va_sM (())))
val va_wpProof_Gctr_bytes_stdcall : win:bool -> alg:algorithm -> in_b:buffer128 -> num_bytes:nat64
-> out_b:buffer128 -> inout_b:buffer128 -> keys_b:buffer128 -> ctr_b:buffer128 ->
num_blocks:nat64 -> key:(seq nat32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_bytes_stdcall win alg in_b num_bytes out_b inout_b
keys_b ctr_b num_blocks key va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_bytes_stdcall win alg)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 2;
va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm
11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;
va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15;
va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10;
va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsp; va_Mod_reg64 rRsi;
va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax;
va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_bytes_stdcall (win:bool) (alg:algorithm) (in_b:buffer128) (num_bytes:nat64)
(out_b:buffer128) (inout_b:buffer128) (keys_b:buffer128) (ctr_b:buffer128) (num_blocks:nat64) | false | false | Vale.AES.X64.GCTR.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Gctr_bytes_stdcall
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
: (va_quickCode unit (va_code_Gctr_bytes_stdcall win alg)) | [] | Vale.AES.X64.GCTR.va_quick_Gctr_bytes_stdcall | {
"file_name": "obj/Vale.AES.X64.GCTR.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
alg: Vale.AES.AES_common_s.algorithm ->
in_b: Vale.X64.Memory.buffer128 ->
num_bytes: Vale.X64.Memory.nat64 ->
out_b: Vale.X64.Memory.buffer128 ->
inout_b: Vale.X64.Memory.buffer128 ->
keys_b: Vale.X64.Memory.buffer128 ->
ctr_b: Vale.X64.Memory.buffer128 ->
num_blocks: Vale.X64.Memory.nat64 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCTR.va_code_Gctr_bytes_stdcall win alg) | {
"end_col": 62,
"end_line": 641,
"start_col": 2,
"start_line": 632
} |
Prims.Tot | val va_ens_Gctr_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_ens_Gctr_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(in_b:buffer128) (num_bytes:nat64) (out_b:buffer128) (inout_b:buffer128) (keys_b:buffer128)
(ctr_b:buffer128) (num_blocks:nat64) (key:(seq nat32)) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Gctr_bytes_stdcall va_b0 va_s0 win alg in_b num_bytes out_b inout_b keys_b ctr_b
num_blocks key /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let
(in_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (out_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (ctr_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in Vale.X64.Decls.modifies_buffer128_2 out_b inout_b (va_get_mem va_s0)
(va_get_mem va_sM) /\ (let plain_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) inout_b)
in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_quads) 0 num_bytes in let cipher_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out_b)
(Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let cipher_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_quads) 0 num_bytes in
cipher_bytes == Vale.AES.GCTR_s.gctr_encrypt_LE (Vale.X64.Decls.buffer128_read ctr_b 0
(va_get_mem va_s0)) (Vale.AES.GCTR.make_gctr_plain_LE plain_bytes) alg key /\ va_get_reg64 rRsp
va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_layout va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM (va_update_xmm 12 va_sM
(va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM
(va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM
va_s0))))))))))))))))))))))))))))))))))))))))) | val va_ens_Gctr_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
(va_sM: va_state)
(va_fM: va_fuel)
: prop
let va_ens_Gctr_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
(va_sM: va_state)
(va_fM: va_fuel)
: prop = | false | null | false | (va_req_Gctr_bytes_stdcall va_b0 va_s0 win alg in_b num_bytes out_b inout_b keys_b ctr_b num_blocks
key /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let in_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0)
in
let out_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0)
in
let inout_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0)
in
let keys_ptr:(va_int_range 0 18446744073709551615) =
(if win
then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)
else va_get_reg64 rR8 va_s0)
in
let ctr_ptr:(va_int_range 0 18446744073709551615) =
(if win
then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
else va_get_reg64 rR9 va_s0)
in
Vale.X64.Decls.modifies_buffer128_2 out_b inout_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(let plain_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_s0) in_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) inout_b)
in
let plain_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_quads)
0
num_bytes
in
let cipher_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_sM) out_b)
(Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b)
in
let cipher_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_quads)
0
num_bytes
in
cipher_bytes ==
Vale.AES.GCTR_s.gctr_encrypt_LE (Vale.X64.Decls.buffer128_read ctr_b 0 (va_get_mem va_s0))
(Vale.AES.GCTR.make_gctr_plain_LE plain_bytes)
alg
key /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\
(win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\
(win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\
(win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\
(win ==> va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\
(win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\
(win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\
(win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\
(win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\
(win ==> va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\
(win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\
(win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\
(~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM
(va_update_stackTaint va_sM
(va_update_stack va_sM
(va_update_flags va_sM
(va_update_mem_layout va_sM
(va_update_mem_heaplet 2
va_sM
(va_update_mem_heaplet 1
va_sM
(va_update_xmm 15
va_sM
(va_update_xmm 14
va_sM
(va_update_xmm 13
va_sM
(va_update_xmm 12
va_sM
(va_update_xmm 11
va_sM
(va_update_xmm 10
va_sM
(va_update_xmm 9
va_sM
(va_update_xmm 8
va_sM
(va_update_xmm 7
va_sM
(va_update_xmm 6
va_sM
(va_update_xmm 5
va_sM
(va_update_xmm 4
va_sM
(va_update_xmm 3
va_sM
(va_update_xmm 2
va_sM
(va_update_xmm 1
va_sM
(va_update_xmm
0
va_sM
(va_update_reg64
rR15
va_sM
(va_update_reg64
rR14
va_sM
(
va_update_reg64
rR13
va_sM
(
va_update_reg64
rR12
va_sM
(
va_update_reg64
rR11
va_sM
(
va_update_reg64
rR10
va_sM
(
va_update_reg64
rR9
va_sM
(
va_update_reg64
rR8
va_sM
(
va_update_reg64
rRbp
va_sM
(
va_update_reg64
rRsp
va_sM
(
va_update_reg64
rRsi
va_sM
(
va_update_reg64
rRdi
va_sM
(
va_update_reg64
rRdx
va_sM
(
va_update_reg64
rRcx
va_sM
(
va_update_reg64
rRbx
va_sM
(
va_update_reg64
rRax
va_sM
(
va_update_ok
va_sM
(
va_update_mem
va_sM
va_s0
)
)
)
)
)
)
)
)
)
)
)
)
)
)
)
)
))
))))))))))))
))))))))))) | {
"checked_file": "Vale.AES.X64.GCTR.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.AESCTRplain.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCTR.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Prims.bool",
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.va_fuel",
"Prims.l_and",
"Vale.AES.X64.GCTR.va_req_Gctr_bytes_stdcall",
"Vale.X64.Decls.va_ensure_total",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.Decls.modifies_buffer128_2",
"Vale.X64.Decls.va_get_mem",
"Prims.eq2",
"Vale.Def.Types_s.nat8",
"Vale.AES.GCTR_s.gctr_encrypt_LE",
"Vale.X64.Decls.buffer128_read",
"Vale.AES.GCTR.make_gctr_plain_LE",
"Vale.Def.Types_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Prims.l_imp",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Decls.quad32",
"Vale.X64.Decls.va_get_xmm",
"Prims.l_not",
"Vale.Def.Words_s.nat8",
"FStar.Seq.Base.slice",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"Vale.Def.Types_s.quad32",
"FStar.Seq.Base.append",
"Vale.X64.Decls.s128",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Stack_i.load_stack64",
"Prims.op_Addition",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Decls.va_state_eq",
"Vale.X64.Decls.va_update_stackTaint",
"Vale.X64.Decls.va_update_stack",
"Vale.X64.Decls.va_update_flags",
"Vale.X64.Decls.va_update_mem_layout",
"Vale.X64.Decls.va_update_mem_heaplet",
"Vale.X64.Decls.va_update_xmm",
"Vale.X64.Decls.va_update_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rRax",
"Vale.X64.Decls.va_update_ok",
"Vale.X64.Decls.va_update_mem",
"Prims.prop"
] | [] | module Vale.AES.X64.GCTR
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open FStar.Seq
open Vale.AES.AES_s
open Vale.AES.X64.AES
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM_helpers
open Vale.Poly1305.Math
open Vale.Def.Words.Two_s
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.AESCTRplain
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 30"
//-- Inc32
val va_code_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_code
val va_codegen_success_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_pbool
val va_lemma_Inc32 : va_b0:va_code -> va_s0:va_state -> dst:va_operand_xmm -> one:va_operand_xmm
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Inc32 dst one) va_s0 /\ va_is_dst_xmm dst va_s0 /\
va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\ va_eval_xmm va_s0 one ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_xmm va_sM dst == Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 /\ va_state_eq va_sM
(va_update_flags va_sM (va_update_ok va_sM (va_update_operand_xmm dst va_sM va_s0)))))
[@ va_qattr]
let va_wp_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_is_dst_xmm dst va_s0 /\ va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\
va_eval_xmm va_s0 one == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0 /\ (forall
(va_x_dst:va_value_xmm) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl
(va_upd_operand_xmm dst va_x_dst va_s0) in va_get_ok va_sM /\ va_eval_xmm va_sM dst ==
Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 ==> va_k va_sM (())))
val va_wpProof_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> va_s0:va_state -> va_k:(va_state
-> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Inc32 dst one va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Inc32 dst one) ([va_Mod_flags;
va_mod_xmm dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) : (va_quickCode unit (va_code_Inc32
dst one)) =
(va_QProc (va_code_Inc32 dst one) ([va_Mod_flags; va_mod_xmm dst]) (va_wp_Inc32 dst one)
(va_wpProof_Inc32 dst one))
//--
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12
va_sM (va_update_flags va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_efl:Vale.X64.Flags.t)
(va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12 (va_upd_flags va_x_efl (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 va_s0)))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0,
va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b) (va_wpProof_Gctr_register alg
key round_keys keys_b))
//--
//-- Gctr_bytes_extra_work
val va_code_Gctr_bytes_extra_work : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_extra_work : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_bytes_extra_work : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->
icb_BE:quad32 -> in_b:buffer128 -> out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32)
-> keys_b:buffer128 -> orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_extra_work alg) va_s0 /\ va_get_ok va_s0 /\
(Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM
(va_update_xmm 4 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))
[@ va_qattr]
let va_wp_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr
in_b (Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks)) /\ (forall (va_x_mem:vale_heap)
(va_x_rdx:nat64) (va_x_r12:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm4:quad32) (va_x_heap1:vale_heap) (va_x_efl:Vale.X64.Flags.t) . let va_sM =
va_upd_flags va_x_efl (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRdx va_x_rdx (va_upd_mem va_x_mem va_s0)))))))) in va_get_ok va_sM /\ (let
num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Gctr_bytes_extra_work : alg:algorithm -> icb_BE:quad32 -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->
orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_bytes_extra_work alg)
([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 4; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128)
(out_b:buffer128) (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128)
(orig_in_ptr:nat64) (orig_out_ptr:nat64) (num_bytes:nat) : (va_quickCode unit
(va_code_Gctr_bytes_extra_work alg)) =
(va_QProc (va_code_Gctr_bytes_extra_work alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 4;
va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_mem])
(va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys keys_b orig_in_ptr
orig_out_ptr num_bytes) (va_wpProof_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes))
//--
//-- Gctr_bytes_no_extra
val va_code_Gctr_bytes_no_extra : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_no_extra : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_bytes_no_extra : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->
icb_BE:quad32 -> in_b:buffer128 -> out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32)
-> keys_b:buffer128 -> orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_no_extra alg) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg
key /\ FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\ (let num_blocks = num_bytes
`op_Division` 16 in num_bytes `op_Modulus` 16 == 0 /\ Vale.AES.GCTR.gctr_partial_def alg
num_blocks (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ (let plain = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0
va_sM) in_b)) 0 num_bytes in let cipher = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1
va_sM) out_b)) 0 num_bytes in cipher == Vale.AES.GCTR_s.gctr_encrypt_LE icb_BE
(Vale.AES.GCTR.make_gctr_plain_LE plain) alg key) /\ va_state_eq va_sM (va_update_ok va_sM
va_s0)))
[@ va_qattr]
let va_wp_Gctr_bytes_no_extra (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg
key /\ FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\ (let num_blocks = num_bytes
`op_Division` 16 in num_bytes `op_Modulus` 16 == 0 /\ Vale.AES.GCTR.gctr_partial_def alg
num_blocks (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE) /\ (let va_sM
= va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1
va_s0) (va_get_mem_heaplet 1 va_sM) /\ (let plain = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0
va_sM) in_b)) 0 num_bytes in let cipher = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1
va_sM) out_b)) 0 num_bytes in cipher == Vale.AES.GCTR_s.gctr_encrypt_LE icb_BE
(Vale.AES.GCTR.make_gctr_plain_LE plain) alg key) ==> va_k va_sM (())))
val va_wpProof_Gctr_bytes_no_extra : alg:algorithm -> icb_BE:quad32 -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->
orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_bytes_no_extra alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_bytes_no_extra alg) ([]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_bytes_no_extra (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) : (va_quickCode unit (va_code_Gctr_bytes_no_extra alg)) =
(va_QProc (va_code_Gctr_bytes_no_extra alg) ([]) (va_wp_Gctr_bytes_no_extra alg icb_BE in_b out_b
key round_keys keys_b orig_in_ptr orig_out_ptr num_bytes) (va_wpProof_Gctr_bytes_no_extra alg
icb_BE in_b out_b key round_keys keys_b orig_in_ptr orig_out_ptr num_bytes))
//--
//-- Gctr_bytes_stdcall
val va_code_Gctr_bytes_stdcall : win:bool -> alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_stdcall : win:bool -> alg:algorithm -> Tot va_pbool
let va_req_Gctr_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(in_b:buffer128) (num_bytes:nat64) (out_b:buffer128) (inout_b:buffer128) (keys_b:buffer128)
(ctr_b:buffer128) (num_blocks:nat64) (key:(seq nat32)) : prop =
(va_require_total va_b0 (va_code_Gctr_bytes_stdcall win alg) va_s0 /\ va_get_ok va_s0 /\ (let
(in_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (out_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (ctr_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (win
==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0))
/\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 16)
(va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ num_bytes == (if win then va_get_reg64 rRdx va_s0 else
va_get_reg64 rRsi va_s0) /\ num_blocks == (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0)) /\ Vale.X64.Decls.buffers_disjoint128 in_b
out_b /\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\ (Vale.X64.Decls.buffers_disjoint128
in_b keys_b \/ in_b == keys_b) /\ Vale.X64.Decls.buffer_disjoints128 ctr_b ([in_b; out_b;
keys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([in_b; out_b; keys_b; ctr_b]) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr in_b num_blocks (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out_ptr out_b num_blocks
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) ctr_ptr ctr_b 1 (va_get_mem_layout
va_s0) Secret /\ in_ptr + 16 `op_Multiply` num_blocks < pow2_64 /\ out_ptr + 16 `op_Multiply`
num_blocks < pow2_64 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
num_blocks /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 ctr_b == 1 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
inout_b == 1 /\ 256 `op_Multiply` Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b <
pow2_32 /\ 4096 `op_Multiply` num_blocks `op_Multiply` 16 < pow2_32 /\ (num_blocks
`op_Multiply` 128 `op_Division` 8 <= num_bytes /\ num_bytes < num_blocks `op_Multiply` 128
`op_Division` 8 + 128 `op_Division` 8) /\ (aesni_enabled /\ avx_enabled /\ sse_enabled) /\ (alg
= AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key))
let va_ens_Gctr_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(in_b:buffer128) (num_bytes:nat64) (out_b:buffer128) (inout_b:buffer128) (keys_b:buffer128) | false | true | Vale.AES.X64.GCTR.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_ens_Gctr_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [] | Vale.AES.X64.GCTR.va_ens_Gctr_bytes_stdcall | {
"file_name": "obj/Vale.AES.X64.GCTR.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
win: Prims.bool ->
alg: Vale.AES.AES_common_s.algorithm ->
in_b: Vale.X64.Memory.buffer128 ->
num_bytes: Vale.X64.Memory.nat64 ->
out_b: Vale.X64.Memory.buffer128 ->
inout_b: Vale.X64.Memory.buffer128 ->
keys_b: Vale.X64.Memory.buffer128 ->
ctr_b: Vale.X64.Memory.buffer128 ->
num_blocks: Vale.X64.Memory.nat64 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
va_sM: Vale.X64.Decls.va_state ->
va_fM: Vale.X64.Decls.va_fuel
-> Prims.prop | {
"end_col": 50,
"end_line": 413,
"start_col": 2,
"start_line": 365
} |
Prims.Tot | val va_wp_Gctr_bytes_stdcall
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESCTRplain",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Gctr_bytes_stdcall (win:bool) (alg:algorithm) (in_b:buffer128) (num_bytes:nat64)
(out_b:buffer128) (inout_b:buffer128) (keys_b:buffer128) (ctr_b:buffer128) (num_blocks:nat64)
(key:(seq nat32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (in_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (out_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64
rRdx va_s0) in let (inout_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0) in let (keys_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun _ -> va_get_reg64 rR8 va_s0) in let
(ctr_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)) (fun
_ -> va_get_reg64 rR9 va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem
va_s0) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0)
(va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64
rRsp va_s0 + 32 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ num_bytes == va_if win (fun _ -> va_get_reg64 rRdx va_s0)
(fun _ -> va_get_reg64 rRsi va_s0) /\ num_blocks == va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 16) (va_get_stack va_s0))
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0)) /\
Vale.X64.Decls.buffers_disjoint128 in_b out_b /\ Vale.X64.Decls.buffers_disjoint128 keys_b
out_b /\ (Vale.X64.Decls.buffers_disjoint128 in_b keys_b \/ in_b == keys_b) /\
Vale.X64.Decls.buffer_disjoints128 ctr_b ([in_b; out_b; keys_b]) /\
Vale.X64.Decls.buffer_disjoints128 inout_b ([in_b; out_b; keys_b; ctr_b]) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr in_b num_blocks (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out_ptr out_b num_blocks
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) ctr_ptr ctr_b 1 (va_get_mem_layout
va_s0) Secret /\ in_ptr + 16 `op_Multiply` num_blocks < pow2_64 /\ out_ptr + 16 `op_Multiply`
num_blocks < pow2_64 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
num_blocks /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 ctr_b == 1 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
inout_b == 1 /\ 256 `op_Multiply` Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b <
pow2_32 /\ 4096 `op_Multiply` num_blocks `op_Multiply` 16 < pow2_32 /\ (num_blocks
`op_Multiply` 128 `op_Division` 8 <= num_bytes /\ num_bytes < num_blocks `op_Multiply` 128
`op_Division` 8 + 128 `op_Division` 8) /\ (aesni_enabled /\ avx_enabled /\ sse_enabled) /\ (alg
= AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64)
(va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64) (va_x_rsi:nat64)
(va_x_rsp:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64)
(va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_heap1:vale_heap) (va_x_heap2:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_flags va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 2
va_x_heap2 (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14
(va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10
va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm
6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm
2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR15 va_x_r15
(va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64
rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRsi va_x_rsi
(va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\ (let (in_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64
rRdi va_s0) in let (out_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64
rRcx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun
_ -> va_get_reg64 rR8 va_s0) in let (ctr_ptr:(va_int_range 0 18446744073709551615)) = va_if win
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack
va_s0)) (fun _ -> va_get_reg64 rR9 va_s0) in Vale.X64.Decls.modifies_buffer128_2 out_b inout_b
(va_get_mem va_s0) (va_get_mem va_sM) /\ (let plain_quads = FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_quads) 0 num_bytes in let cipher_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out_b)
(Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let cipher_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_quads) 0 num_bytes in
cipher_bytes == Vale.AES.GCTR_s.gctr_encrypt_LE (Vale.X64.Decls.buffer128_read ctr_b 0
(va_get_mem va_s0)) (Vale.AES.GCTR.make_gctr_plain_LE plain_bytes) alg key /\ va_get_reg64 rRsp
va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) ==>
va_k va_sM (()))) | val va_wp_Gctr_bytes_stdcall
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Gctr_bytes_stdcall
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(let in_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0)
in
let out_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0)
in
let inout_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0)
in
let keys_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0)
(va_get_stack va_s0))
(fun _ -> va_get_reg64 rR8 va_s0)
in
let ctr_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8)
(va_get_stack va_s0))
(fun _ -> va_get_reg64 rR9 va_s0)
in
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 16)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
num_bytes == va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) /\
num_blocks ==
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 16) (va_get_stack va_s0)
)
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0)) /\
Vale.X64.Decls.buffers_disjoint128 in_b out_b /\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\
(Vale.X64.Decls.buffers_disjoint128 in_b keys_b \/ in_b == keys_b) /\
Vale.X64.Decls.buffer_disjoints128 ctr_b ([in_b; out_b; keys_b]) /\
Vale.X64.Decls.buffer_disjoints128 inout_b ([in_b; out_b; keys_b; ctr_b]) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in_ptr
in_b
num_blocks
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out_ptr
out_b
num_blocks
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr
inout_b
1
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
keys_ptr
keys_b
(Vale.AES.AES_common_s.nr alg + 1)
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
ctr_ptr
ctr_b
1
(va_get_mem_layout va_s0)
Secret /\ in_ptr + 16 `op_Multiply` num_blocks < pow2_64 /\
out_ptr + 16 `op_Multiply` num_blocks < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b == num_blocks /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 ctr_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
256 `op_Multiply` (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b) < pow2_32 /\
(4096 `op_Multiply` num_blocks) `op_Multiply` 16 < pow2_32 /\
((num_blocks `op_Multiply` 128) `op_Division` 8 <= num_bytes /\
num_bytes < (num_blocks `op_Multiply` 128) `op_Division` 8 + 128 `op_Division` 8) /\
(aesni_enabled /\ avx_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key) /\
(forall (va_x_mem: vale_heap) (va_x_rax: nat64) (va_x_rbx: nat64) (va_x_rcx: nat64)
(va_x_rdx: nat64) (va_x_rdi: nat64) (va_x_rsi: nat64) (va_x_rsp: nat64) (va_x_rbp: nat64)
(va_x_r8: nat64) (va_x_r9: nat64) (va_x_r10: nat64) (va_x_r11: nat64) (va_x_r12: nat64)
(va_x_r13: nat64) (va_x_r14: nat64) (va_x_r15: nat64) (va_x_xmm0: quad32) (va_x_xmm1: quad32)
(va_x_xmm2: quad32) (va_x_xmm3: quad32) (va_x_xmm4: quad32) (va_x_xmm5: quad32)
(va_x_xmm6: quad32) (va_x_xmm7: quad32) (va_x_xmm8: quad32) (va_x_xmm9: quad32)
(va_x_xmm10: quad32) (va_x_xmm11: quad32) (va_x_xmm12: quad32) (va_x_xmm13: quad32)
(va_x_xmm14: quad32) (va_x_xmm15: quad32) (va_x_heap1: vale_heap) (va_x_heap2: vale_heap)
(va_x_memLayout: vale_heap_layout) (va_x_efl: Vale.X64.Flags.t) (va_x_stack: vale_stack)
(va_x_stackTaint: memtaint).
      let va_sM =
        va_upd_stackTaint va_x_stackTaint (
        va_upd_stack va_x_stack (
        va_upd_flags va_x_efl (
        va_upd_mem_layout va_x_memLayout (
        va_upd_mem_heaplet 2 va_x_heap2 (
        va_upd_mem_heaplet 1 va_x_heap1 (
        va_upd_xmm 15 va_x_xmm15 (
        va_upd_xmm 14 va_x_xmm14 (
        va_upd_xmm 13 va_x_xmm13 (
        va_upd_xmm 12 va_x_xmm12 (
        va_upd_xmm 11 va_x_xmm11 (
        va_upd_xmm 10 va_x_xmm10 (
        va_upd_xmm 9 va_x_xmm9 (
        va_upd_xmm 8 va_x_xmm8 (
        va_upd_xmm 7 va_x_xmm7 (
        va_upd_xmm 6 va_x_xmm6 (
        va_upd_xmm 5 va_x_xmm5 (
        va_upd_xmm 4 va_x_xmm4 (
        va_upd_xmm 3 va_x_xmm3 (
        va_upd_xmm 2 va_x_xmm2 (
        va_upd_xmm 1 va_x_xmm1 (
        va_upd_xmm 0 va_x_xmm0 (
        va_upd_reg64 rR15 va_x_r15 (
        va_upd_reg64 rR14 va_x_r14 (
        va_upd_reg64 rR13 va_x_r13 (
        va_upd_reg64 rR12 va_x_r12 (
        va_upd_reg64 rR11 va_x_r11 (
        va_upd_reg64 rR10 va_x_r10 (
        va_upd_reg64 rR9 va_x_r9 (
        va_upd_reg64 rR8 va_x_r8 (
        va_upd_reg64 rRbp va_x_rbp (
        va_upd_reg64 rRsp va_x_rsp (
        va_upd_reg64 rRsi va_x_rsi (
        va_upd_reg64 rRdi va_x_rdi (
        va_upd_reg64 rRdx va_x_rdx (
        va_upd_reg64 rRcx va_x_rcx (
        va_upd_reg64 rRbx va_x_rbx (
        va_upd_reg64 rRax va_x_rax (
        va_upd_mem va_x_mem va_s0
        ))))))))))))))))))))))))))))))))))))))
in
va_get_ok va_sM /\
(let in_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0)
in
let out_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0)
in
let inout_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0)
in
let keys_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0)
(va_get_stack va_s0))
(fun _ -> va_get_reg64 rR8 va_s0)
in
let ctr_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8)
(va_get_stack va_s0))
(fun _ -> va_get_reg64 rR9 va_s0)
in
Vale.X64.Decls.modifies_buffer128_2 out_b inout_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(let plain_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_s0) in_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) inout_b)
in
let plain_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_quads)
0
num_bytes
in
let cipher_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_sM) out_b)
(Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b)
in
let cipher_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_quads)
0
num_bytes
in
cipher_bytes ==
Vale.AES.GCTR_s.gctr_encrypt_LE (Vale.X64.Decls.buffer128_read ctr_b 0 (va_get_mem va_s0))
(Vale.AES.GCTR.make_gctr_plain_LE plain_bytes)
alg
key /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\
(win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\
(win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\
(win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\
(win ==> va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\
(win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\
(win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\
(win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\
(win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\
(win ==> va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\
(win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\
(win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\
(~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.AES.X64.GCTR.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.AESCTRplain.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCTR.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Prims.l_imp",
"Vale.X64.Stack_i.valid_stack_slot64",
"Prims.op_Addition",
"Vale.Arch.HeapTypes_s.Public",
"Vale.X64.Decls.va_get_stackTaint",
"Prims.l_not",
"Vale.X64.Decls.va_if",
"Vale.Def.Types_s.nat64",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Stack_i.load_stack64",
"Vale.X64.Decls.buffers_disjoint128",
"Prims.l_or",
"Vale.X64.Decls.buffer_disjoints128",
"Prims.Cons",
"Prims.Nil",
"Vale.X64.Decls.validSrcAddrs128",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validDstAddrs128",
"Vale.AES.AES_common_s.nr",
"Prims.op_LessThan",
"Prims.op_Multiply",
"Vale.X64.Machine_s.pow2_64",
"Prims.nat",
"Vale.X64.Decls.buffer_length",
"Vale.X64.Memory.vuint128",
"Prims.int",
"Vale.X64.Machine_s.pow2_32",
"Prims.op_LessThanOrEqual",
"Prims.op_Division",
"Vale.X64.CPU_Features_s.aesni_enabled",
"Vale.X64.CPU_Features_s.avx_enabled",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.op_Equality",
"Vale.AES.AES_common_s.AES_128",
"Vale.AES.AES_common_s.AES_256",
"Vale.AES.AES_s.is_aes_key_LE",
"Vale.Def.Types_s.quad32",
"Vale.X64.Decls.buffer128_as_seq",
"Vale.AES.AES_s.key_to_round_keys_LE",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRdi",
"Prims.l_Forall",
"Vale.X64.InsBasic.vale_heap",
"Vale.X64.Decls.quad32",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Vale.X64.Flags.t",
"Vale.X64.InsBasic.vale_stack",
"Vale.X64.Memory.memtaint",
"Vale.X64.Decls.modifies_buffer128_2",
"Vale.Def.Types_s.nat8",
"Vale.AES.GCTR_s.gctr_encrypt_LE",
"Vale.X64.Decls.buffer128_read",
"Vale.AES.GCTR.make_gctr_plain_LE",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Decls.va_get_xmm",
"Vale.Def.Words_s.nat8",
"FStar.Seq.Base.slice",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"FStar.Seq.Base.append",
"Vale.X64.Decls.s128",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_stackTaint",
"Vale.X64.Decls.va_upd_stack",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_mem_layout",
"Vale.X64.Decls.va_upd_mem_heaplet",
"Vale.X64.Decls.va_upd_xmm",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rRax",
"Vale.X64.Decls.va_upd_mem"
] | [] | module Vale.AES.X64.GCTR
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open FStar.Seq
open Vale.AES.AES_s
open Vale.AES.X64.AES
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM_helpers
open Vale.Poly1305.Math
open Vale.Def.Words.Two_s
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.AESCTRplain
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 30"
//-- Inc32
val va_code_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_code
val va_codegen_success_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> Tot va_pbool
val va_lemma_Inc32 : va_b0:va_code -> va_s0:va_state -> dst:va_operand_xmm -> one:va_operand_xmm
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Inc32 dst one) va_s0 /\ va_is_dst_xmm dst va_s0 /\
va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\ va_eval_xmm va_s0 one ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_xmm va_sM dst == Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 /\ va_state_eq va_sM
(va_update_flags va_sM (va_update_ok va_sM (va_update_operand_xmm dst va_sM va_s0)))))
[@ va_qattr]
let va_wp_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_is_dst_xmm dst va_s0 /\ va_is_src_xmm one va_s0 /\ va_get_ok va_s0 /\ sse_enabled /\
va_eval_xmm va_s0 one == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 1 0 0 0 /\ (forall
(va_x_dst:va_value_xmm) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl
(va_upd_operand_xmm dst va_x_dst va_s0) in va_get_ok va_sM /\ va_eval_xmm va_sM dst ==
Vale.AES.GCTR_s.inc32 (va_eval_xmm va_s0 dst) 1 ==> va_k va_sM (())))
val va_wpProof_Inc32 : dst:va_operand_xmm -> one:va_operand_xmm -> va_s0:va_state -> va_k:(va_state
-> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Inc32 dst one va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Inc32 dst one) ([va_Mod_flags;
va_mod_xmm dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Inc32 (dst:va_operand_xmm) (one:va_operand_xmm) : (va_quickCode unit (va_code_Inc32
dst one)) =
(va_QProc (va_code_Inc32 dst one) ([va_Mod_flags; va_mod_xmm dst]) (va_wp_Inc32 dst one)
(va_wpProof_Inc32 dst one))
//--
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12
va_sM (va_update_flags va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (aesni_enabled /\ sse_enabled /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_efl:Vale.X64.Flags.t)
(va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12 (va_upd_flags va_x_efl (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 va_s0)))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 1 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 7 va_sM) (Vale.Def.Types_s.le_quad32_to_bytes
(va_get_xmm 1 va_s0)) alg key /\ va_get_xmm 1 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block
(va_get_xmm 7 va_sM) (va_get_xmm 1 va_s0) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0,
va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b) (va_wpProof_Gctr_register alg
key round_keys keys_b))
//--
//-- Gctr_bytes_extra_work
val va_code_Gctr_bytes_extra_work : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_extra_work : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_bytes_extra_work : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->
icb_BE:quad32 -> in_b:buffer128 -> out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32)
-> keys_b:buffer128 -> orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_extra_work alg) va_s0 /\ va_get_ok va_s0 /\
(Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM
(va_update_xmm 4 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))
[@ va_qattr]
let va_wp_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr
in_b (Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (aesni_enabled /\ sse_enabled) /\ (alg = AES_128 \/ alg = AES_256) /\
Vale.AES.AES_s.is_aes_key_LE alg key /\ FStar.Seq.Base.length #quad32 round_keys ==
Vale.AES.AES_common_s.nr alg + 1 /\ round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key
/\ va_get_reg64 rR8 va_s0 == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b
(va_get_mem_heaplet 0 va_s0) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR8 va_s0) keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) keys_b == round_keys /\
(let num_blocks = num_bytes `op_Division` 16 in num_bytes `op_Modulus` 16 =!= 0 /\ va_get_reg64
rR9 va_s0 == orig_in_ptr + 16 `op_Multiply` num_blocks /\ va_get_reg64 rR10 va_s0 ==
orig_out_ptr + 16 `op_Multiply` num_blocks /\ Vale.AES.GCTR.gctr_partial_def alg num_blocks
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE /\ va_get_xmm 7
va_s0 == Vale.AES.GCTR_s.inc32 icb_BE num_blocks)) /\ (forall (va_x_mem:vale_heap)
(va_x_rdx:nat64) (va_x_r12:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm4:quad32) (va_x_heap1:vale_heap) (va_x_efl:Vale.X64.Flags.t) . let va_sM =
va_upd_flags va_x_efl (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 2
va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRdx va_x_rdx (va_upd_mem va_x_mem va_s0)))))))) in va_get_ok va_sM /\ (let
num_blocks = num_bytes `op_Division` 16 in Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\ FStar.Seq.Base.slice
#Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_sM) out_b) 0
num_blocks == FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.buffer128_as_seq
(va_get_mem_heaplet 1 va_s0) out_b) 0 num_blocks /\ Vale.X64.Decls.buffer128_read out_b
num_blocks (va_get_mem_heaplet 1 va_sM) == Vale.AES.GCTR_s.gctr_encrypt_block icb_BE
(Vale.X64.Decls.buffer128_read in_b num_blocks (va_get_mem_heaplet 0 va_sM)) alg key num_blocks
/\ va_get_xmm 1 va_sM == Vale.X64.Decls.buffer128_read out_b num_blocks (va_get_mem_heaplet 1
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Gctr_bytes_extra_work : alg:algorithm -> icb_BE:quad32 -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->
orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_bytes_extra_work alg)
([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 4; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_bytes_extra_work (alg:algorithm) (icb_BE:quad32) (in_b:buffer128)
(out_b:buffer128) (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128)
(orig_in_ptr:nat64) (orig_out_ptr:nat64) (num_bytes:nat) : (va_quickCode unit
(va_code_Gctr_bytes_extra_work alg)) =
(va_QProc (va_code_Gctr_bytes_extra_work alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 4;
va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_mem])
(va_wp_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys keys_b orig_in_ptr
orig_out_ptr num_bytes) (va_wpProof_Gctr_bytes_extra_work alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes))
//--
//-- Gctr_bytes_no_extra
val va_code_Gctr_bytes_no_extra : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_no_extra : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_bytes_no_extra : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->
icb_BE:quad32 -> in_b:buffer128 -> out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32)
-> keys_b:buffer128 -> orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_no_extra alg) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg
key /\ FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\ (let num_blocks = num_bytes
`op_Division` 16 in num_bytes `op_Modulus` 16 == 0 /\ Vale.AES.GCTR.gctr_partial_def alg
num_blocks (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ (let plain = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0
va_sM) in_b)) 0 num_bytes in let cipher = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1
va_sM) out_b)) 0 num_bytes in cipher == Vale.AES.GCTR_s.gctr_encrypt_LE icb_BE
(Vale.AES.GCTR.make_gctr_plain_LE plain) alg key) /\ va_state_eq va_sM (va_update_ok va_sM
va_s0)))
[@ va_qattr]
let va_wp_Gctr_bytes_no_extra (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) orig_in_ptr in_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) orig_out_ptr out_b
(Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes) (va_get_mem_layout va_s0) Secret /\
orig_in_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
orig_out_ptr + 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes < pow2_64 /\
l_and (l_and (l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out_b == Vale.AES.GCM_helpers.bytes_to_quad_size num_bytes))
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b `op_Multiply` 16 < pow2_32))
(num_bytes < pow2_32) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg
key /\ FStar.Seq.Base.length #quad32 round_keys == Vale.AES.AES_common_s.nr alg + 1 /\
round_keys == Vale.AES.AES_s.key_to_round_keys_LE alg key /\ (let num_blocks = num_bytes
`op_Division` 16 in num_bytes `op_Modulus` 16 == 0 /\ Vale.AES.GCTR.gctr_partial_def alg
num_blocks (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0 va_s0) in_b)
(Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1 va_s0) out_b) key icb_BE) /\ (let va_sM
= va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1
va_s0) (va_get_mem_heaplet 1 va_sM) /\ (let plain = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 0
va_sM) in_b)) 0 num_bytes in let cipher = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem_heaplet 1
va_sM) out_b)) 0 num_bytes in cipher == Vale.AES.GCTR_s.gctr_encrypt_LE icb_BE
(Vale.AES.GCTR.make_gctr_plain_LE plain) alg key) ==> va_k va_sM (())))
val va_wpProof_Gctr_bytes_no_extra : alg:algorithm -> icb_BE:quad32 -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->
orig_in_ptr:nat64 -> orig_out_ptr:nat64 -> num_bytes:nat -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_bytes_no_extra alg icb_BE in_b out_b key round_keys
keys_b orig_in_ptr orig_out_ptr num_bytes va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_bytes_no_extra alg) ([]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_bytes_no_extra (alg:algorithm) (icb_BE:quad32) (in_b:buffer128) (out_b:buffer128)
(key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (orig_in_ptr:nat64)
(orig_out_ptr:nat64) (num_bytes:nat) : (va_quickCode unit (va_code_Gctr_bytes_no_extra alg)) =
(va_QProc (va_code_Gctr_bytes_no_extra alg) ([]) (va_wp_Gctr_bytes_no_extra alg icb_BE in_b out_b
key round_keys keys_b orig_in_ptr orig_out_ptr num_bytes) (va_wpProof_Gctr_bytes_no_extra alg
icb_BE in_b out_b key round_keys keys_b orig_in_ptr orig_out_ptr num_bytes))
//--
//-- Gctr_bytes_stdcall
val va_code_Gctr_bytes_stdcall : win:bool -> alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_bytes_stdcall : win:bool -> alg:algorithm -> Tot va_pbool
let va_req_Gctr_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(in_b:buffer128) (num_bytes:nat64) (out_b:buffer128) (inout_b:buffer128) (keys_b:buffer128)
(ctr_b:buffer128) (num_blocks:nat64) (key:(seq nat32)) : prop =
(va_require_total va_b0 (va_code_Gctr_bytes_stdcall win alg) va_s0 /\ va_get_ok va_s0 /\ (let
(in_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (out_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (ctr_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (win
==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0))
/\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 16)
(va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ num_bytes == (if win then va_get_reg64 rRdx va_s0 else
va_get_reg64 rRsi va_s0) /\ num_blocks == (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0)) /\ Vale.X64.Decls.buffers_disjoint128 in_b
out_b /\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\ (Vale.X64.Decls.buffers_disjoint128
in_b keys_b \/ in_b == keys_b) /\ Vale.X64.Decls.buffer_disjoints128 ctr_b ([in_b; out_b;
keys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([in_b; out_b; keys_b; ctr_b]) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr in_b num_blocks (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out_ptr out_b num_blocks
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) ctr_ptr ctr_b 1 (va_get_mem_layout
va_s0) Secret /\ in_ptr + 16 `op_Multiply` num_blocks < pow2_64 /\ out_ptr + 16 `op_Multiply`
num_blocks < pow2_64 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
num_blocks /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 ctr_b == 1 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
inout_b == 1 /\ 256 `op_Multiply` Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b <
pow2_32 /\ 4096 `op_Multiply` num_blocks `op_Multiply` 16 < pow2_32 /\ (num_blocks
`op_Multiply` 128 `op_Division` 8 <= num_bytes /\ num_bytes < num_blocks `op_Multiply` 128
`op_Division` 8 + 128 `op_Division` 8) /\ (aesni_enabled /\ avx_enabled /\ sse_enabled) /\ (alg
= AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key))
let va_ens_Gctr_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(in_b:buffer128) (num_bytes:nat64) (out_b:buffer128) (inout_b:buffer128) (keys_b:buffer128)
(ctr_b:buffer128) (num_blocks:nat64) (key:(seq nat32)) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Gctr_bytes_stdcall va_b0 va_s0 win alg in_b num_bytes out_b inout_b keys_b ctr_b
num_blocks key /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let
(in_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (out_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (ctr_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in Vale.X64.Decls.modifies_buffer128_2 out_b inout_b (va_get_mem va_s0)
(va_get_mem va_sM) /\ (let plain_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) inout_b)
in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_quads) 0 num_bytes in let cipher_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out_b)
(Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let cipher_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_quads) 0 num_bytes in
cipher_bytes == Vale.AES.GCTR_s.gctr_encrypt_LE (Vale.X64.Decls.buffer128_read ctr_b 0
(va_get_mem va_s0)) (Vale.AES.GCTR.make_gctr_plain_LE plain_bytes) alg key /\ va_get_reg64 rRsp
va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_layout va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM (va_update_xmm 12 va_sM
(va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM
(va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM
va_s0)))))))))))))))))))))))))))))))))))))))))
val va_lemma_Gctr_bytes_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> alg:algorithm ->
in_b:buffer128 -> num_bytes:nat64 -> out_b:buffer128 -> inout_b:buffer128 -> keys_b:buffer128 ->
ctr_b:buffer128 -> num_blocks:nat64 -> key:(seq nat32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_bytes_stdcall win alg) va_s0 /\ va_get_ok va_s0
/\ (let (in_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (out_ptr:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (ctr_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (win
==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0))
/\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 16)
(va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ num_bytes == (if win then va_get_reg64 rRdx va_s0 else
va_get_reg64 rRsi va_s0) /\ num_blocks == (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0)) /\ Vale.X64.Decls.buffers_disjoint128 in_b
out_b /\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\ (Vale.X64.Decls.buffers_disjoint128
in_b keys_b \/ in_b == keys_b) /\ Vale.X64.Decls.buffer_disjoints128 ctr_b ([in_b; out_b;
keys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([in_b; out_b; keys_b; ctr_b]) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr in_b num_blocks (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out_ptr out_b num_blocks
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) ctr_ptr ctr_b 1 (va_get_mem_layout
va_s0) Secret /\ in_ptr + 16 `op_Multiply` num_blocks < pow2_64 /\ out_ptr + 16 `op_Multiply`
num_blocks < pow2_64 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
num_blocks /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 ctr_b == 1 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
inout_b == 1 /\ 256 `op_Multiply` Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b <
pow2_32 /\ 4096 `op_Multiply` num_blocks `op_Multiply` 16 < pow2_32 /\ (num_blocks
`op_Multiply` 128 `op_Division` 8 <= num_bytes /\ num_bytes < num_blocks `op_Multiply` 128
`op_Division` 8 + 128 `op_Division` 8) /\ (aesni_enabled /\ avx_enabled /\ sse_enabled) /\ (alg
= AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (in_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (out_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (ctr_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in Vale.X64.Decls.modifies_buffer128_2 out_b inout_b (va_get_mem va_s0)
(va_get_mem va_sM) /\ (let plain_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) inout_b)
in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_quads) 0 num_bytes in let cipher_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out_b)
(Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let cipher_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_quads) 0 num_bytes in
cipher_bytes == Vale.AES.GCTR_s.gctr_encrypt_LE (Vale.X64.Decls.buffer128_read ctr_b 0
(va_get_mem va_s0)) (Vale.AES.GCTR.make_gctr_plain_LE plain_bytes) alg key /\ va_get_reg64 rRsp
va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_layout va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM (va_update_xmm 12 va_sM
(va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM
(va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM
va_s0))))))))))))))))))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_bytes_stdcall (win:bool) (alg:algorithm) (in_b:buffer128) (num_bytes:nat64)
(out_b:buffer128) (inout_b:buffer128) (keys_b:buffer128) (ctr_b:buffer128) (num_blocks:nat64) | false | true | Vale.AES.X64.GCTR.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Gctr_bytes_stdcall
(win: bool)
(alg: algorithm)
(in_b: buffer128)
(num_bytes: nat64)
(out_b inout_b keys_b ctr_b: buffer128)
(num_blocks: nat64)
(key: (seq nat32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.AES.X64.GCTR.va_wp_Gctr_bytes_stdcall | {
"file_name": "obj/Vale.AES.X64.GCTR.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
alg: Vale.AES.AES_common_s.algorithm ->
in_b: Vale.X64.Memory.buffer128 ->
num_bytes: Vale.X64.Memory.nat64 ->
out_b: Vale.X64.Memory.buffer128 ->
inout_b: Vale.X64.Memory.buffer128 ->
keys_b: Vale.X64.Memory.buffer128 ->
ctr_b: Vale.X64.Memory.buffer128 ->
num_blocks: Vale.X64.Memory.nat64 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 21,
"end_line": 611,
"start_col": 2,
"start_line": 513
} |
Prims.Tot | val va_quick_Sha_update_bytes_stdcall
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
: (va_quickCode unit (va_code_Sha_update_bytes_stdcall win)) | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Loops",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Agile.Hash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.SHA2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA.SHA_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsSha",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Loops",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Agile.Hash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.SHA2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA.SHA_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsSha",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Sha_update_bytes_stdcall (win:bool) (ctx_b:buffer128) (in_b:buffer128) (num_val:nat64)
(k_b:buffer128) : (va_quickCode unit (va_code_Sha_update_bytes_stdcall win)) =
(va_QProc (va_code_Sha_update_bytes_stdcall win) ([va_Mod_stackTaint; va_Mod_stack;
va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm
13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;
va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm
0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64
rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp; va_Mod_reg64
rRbp; va_Mod_reg64 rRdi; va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64
rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Sha_update_bytes_stdcall win ctx_b in_b num_val
k_b) (va_wpProof_Sha_update_bytes_stdcall win ctx_b in_b num_val k_b)) | val va_quick_Sha_update_bytes_stdcall
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
: (va_quickCode unit (va_code_Sha_update_bytes_stdcall win))
let va_quick_Sha_update_bytes_stdcall
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
: (va_quickCode unit (va_code_Sha_update_bytes_stdcall win)) = | false | null | false | (va_QProc (va_code_Sha_update_bytes_stdcall win)
([
va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10;
va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14;
va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9;
va_Mod_reg64 rR8; va_Mod_reg64 rRsp; va_Mod_reg64 rRbp; va_Mod_reg64 rRdi; va_Mod_reg64 rRsi;
va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem
])
(va_wp_Sha_update_bytes_stdcall win ctx_b in_b num_val k_b)
(va_wpProof_Sha_update_bytes_stdcall win ctx_b in_b num_val k_b)) | {
"checked_file": "Vale.SHA.X64.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsSha.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.SHA.SHA_helpers.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Spec.SHA2.fsti.checked",
"Spec.Loops.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.SHA.X64.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.SHA.X64.va_code_Sha_update_bytes_stdcall",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_stackTaint",
"Vale.X64.QuickCode.va_Mod_stack",
"Vale.X64.QuickCode.va_Mod_mem_layout",
"Vale.X64.QuickCode.va_Mod_mem_heaplet",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_xmm",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRax",
"Vale.X64.QuickCode.va_Mod_mem",
"Prims.Nil",
"Vale.SHA.X64.va_wp_Sha_update_bytes_stdcall",
"Vale.SHA.X64.va_wpProof_Sha_update_bytes_stdcall",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.SHA.X64
open Vale.Def.Opaque_s
open Vale.Def.Types_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open FStar.Seq
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.InsVector
open Vale.X64.InsSha
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.SHA.SHA_helpers
open Spec.SHA2
open Spec.Agile.Hash
open Spec.Hash.Definitions
open Spec.Loops
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 40"
//-- Sha_update_bytes_stdcall
val va_code_Sha_update_bytes_stdcall : win:bool -> Tot va_code
val va_codegen_success_Sha_update_bytes_stdcall : win:bool -> Tot va_pbool
let va_req_Sha_update_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (ctx_b:buffer128)
(in_b:buffer128) (num_val:nat64) (k_b:buffer128) : prop =
(va_require_total va_b0 (va_code_Sha_update_bytes_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(ctx_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (num:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (k_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(sha_enabled /\ sse_enabled) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 in_b]))
(ctx_b == in_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(ctx_b == k_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 in_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(in_b == k_b) /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) ctx_ptr ctx_b 2
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr
in_b (4 `op_Multiply` num) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) k_ptr k_b 16 (va_get_mem_layout va_s0) Secret /\ num_val == num /\ in_ptr +
64 `op_Multiply` num < pow2_64 /\ Vale.X64.Decls.buffers_disjoint128 ctx_b in_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 ctx_b == 2 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b == 4 `op_Multiply` num /\
Vale.SHA.SHA_helpers.k_reqs (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) k_b)))
let va_ens_Sha_update_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (ctx_b:buffer128)
(in_b:buffer128) (num_val:nat64) (k_b:buffer128) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Sha_update_bytes_stdcall va_b0 va_s0 win ctx_b in_b num_val k_b /\ va_ensure_total va_b0
va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let (ctx_ptr:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range
0 18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0)
in let (num:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (k_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let hash_in =
Vale.SHA.SHA_helpers.le_bytes_to_hash (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) ctx_b)) in let hash_out =
Vale.SHA.SHA_helpers.le_bytes_to_hash (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_sM) ctx_b)) in (let input_LE =
Vale.Def.Words.Seq_s.seq_nat8_to_seq_uint8 (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_sM) in_b)) in l_and (FStar.Seq.Base.length
#FStar.UInt8.t input_LE `op_Modulus` 64 == 0) (hash_out ==
Vale.SHA.SHA_helpers.update_multi_transparent hash_in input_LE)) /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 ctx_b) (va_get_mem va_s0) (va_get_mem
va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64
rRsi va_sM == va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12
va_s0) /\ (win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64
rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM ==
va_get_reg64 rR12 va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15
va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\ (win
==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==> va_get_xmm 8 va_sM == va_get_xmm 8
va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\ (win ==> va_get_xmm 10 va_sM ==
va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\ (win ==>
va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==> va_get_xmm 13 va_sM == va_get_xmm 13
va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\ (win ==> va_get_xmm 15 va_sM
== va_get_xmm 15 va_s0)) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM (va_update_xmm 12 va_sM
(va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM
va_s0))))))))))))))))))))))))))))))))))))))))
val va_lemma_Sha_update_bytes_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool ->
ctx_b:buffer128 -> in_b:buffer128 -> num_val:nat64 -> k_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Sha_update_bytes_stdcall win) va_s0 /\ va_get_ok va_s0
/\ (let (ctx_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (num:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (k_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(sha_enabled /\ sse_enabled) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 in_b]))
(ctx_b == in_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(ctx_b == k_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 in_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(in_b == k_b) /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) ctx_ptr ctx_b 2
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr
in_b (4 `op_Multiply` num) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) k_ptr k_b 16 (va_get_mem_layout va_s0) Secret /\ num_val == num /\ in_ptr +
64 `op_Multiply` num < pow2_64 /\ Vale.X64.Decls.buffers_disjoint128 ctx_b in_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 ctx_b == 2 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b == 4 `op_Multiply` num /\
Vale.SHA.SHA_helpers.k_reqs (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) k_b))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (ctx_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (num:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (k_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let hash_in = Vale.SHA.SHA_helpers.le_bytes_to_hash
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0)
ctx_b)) in let hash_out = Vale.SHA.SHA_helpers.le_bytes_to_hash
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_sM)
ctx_b)) in (let input_LE = Vale.Def.Words.Seq_s.seq_nat8_to_seq_uint8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_sM)
in_b)) in l_and (FStar.Seq.Base.length #FStar.UInt8.t input_LE `op_Modulus` 64 == 0) (hash_out
== Vale.SHA.SHA_helpers.update_multi_transparent hash_in input_LE)) /\
Vale.X64.Decls.modifies_mem (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 ctx_b)
(va_get_mem va_s0) (va_get_mem va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\
(win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (win ==> va_get_reg64 rRbp
va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi
va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64
rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64
rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==>
va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6 va_sM ==
va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0)) /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_stack va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_xmm 15 va_sM (va_update_xmm 14
va_sM (va_update_xmm 13 va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10
va_sM (va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6
va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2
va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM
(va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Sha_update_bytes_stdcall (win:bool) (ctx_b:buffer128) (in_b:buffer128) (num_val:nat64)
(k_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (ctx_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in let (num:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let (k_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64
rRcx va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (sha_enabled /\
sse_enabled) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 in_b]))
(ctx_b == in_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(ctx_b == k_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 in_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(in_b == k_b) /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) ctx_ptr ctx_b 2
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr
in_b (4 `op_Multiply` num) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) k_ptr k_b 16 (va_get_mem_layout va_s0) Secret /\ num_val == num /\ in_ptr +
64 `op_Multiply` num < pow2_64 /\ Vale.X64.Decls.buffers_disjoint128 ctx_b in_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 ctx_b == 2 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b == 4 `op_Multiply` num /\
Vale.SHA.SHA_helpers.k_reqs (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) k_b)) /\
(forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rcx:nat64)
(va_x_rdx:nat64) (va_x_rsi:nat64) (va_x_rdi:nat64) (va_x_rbp:nat64) (va_x_rsp:nat64)
(va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_r12:nat64)
(va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)
(va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)
(va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32) (va_x_xmm10:quad32)
(va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32) (va_x_xmm14:quad32)
(va_x_xmm15:quad32) (va_x_efl:Vale.X64.Flags.t) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_stack:vale_stack) (va_x_stackTaint:memtaint) . let
va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack va_x_stack (va_upd_mem_layout
va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags va_x_efl (va_upd_xmm 15
va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12
(va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8
va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4
va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0
va_x_xmm0 (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13
(va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10
(va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64
rRbp va_x_rbp (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdx
va_x_rdx (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax
(va_upd_mem va_x_mem va_s0))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\ (let
(ctx_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0)
(fun _ -> va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range 0 18446744073709551615)) =
va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let
(num:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun
_ -> va_get_reg64 rRdx va_s0) in let (k_ptr:(va_int_range 0 18446744073709551615)) = va_if win
(fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0) in let hash_in =
Vale.SHA.SHA_helpers.le_bytes_to_hash (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) ctx_b)) in let hash_out =
Vale.SHA.SHA_helpers.le_bytes_to_hash (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_sM) ctx_b)) in (let input_LE =
Vale.Def.Words.Seq_s.seq_nat8_to_seq_uint8 (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_sM) in_b)) in l_and (FStar.Seq.Base.length
#FStar.UInt8.t input_LE `op_Modulus` 64 == 0) (hash_out ==
Vale.SHA.SHA_helpers.update_multi_transparent hash_in input_LE)) /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 ctx_b) (va_get_mem va_s0) (va_get_mem
va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64
rRsi va_sM == va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12
va_s0) /\ (win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64
rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM ==
va_get_reg64 rR12 va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15
va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\ (win
==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==> va_get_xmm 8 va_sM == va_get_xmm 8
va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\ (win ==> va_get_xmm 10 va_sM ==
va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\ (win ==>
va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==> va_get_xmm 13 va_sM == va_get_xmm 13
va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\ (win ==> va_get_xmm 15 va_sM
== va_get_xmm 15 va_s0)) ==> va_k va_sM (())))
val va_wpProof_Sha_update_bytes_stdcall : win:bool -> ctx_b:buffer128 -> in_b:buffer128 ->
num_val:nat64 -> k_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Sha_update_bytes_stdcall win ctx_b in_b num_val k_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Sha_update_bytes_stdcall win)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10;
va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm
3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64
rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64
rR8; va_Mod_reg64 rRsp; va_Mod_reg64 rRbp; va_Mod_reg64 rRdi; va_Mod_reg64 rRsi; va_Mod_reg64
rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Sha_update_bytes_stdcall (win:bool) (ctx_b:buffer128) (in_b:buffer128) (num_val:nat64) | false | false | Vale.SHA.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 40,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Sha_update_bytes_stdcall
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
: (va_quickCode unit (va_code_Sha_update_bytes_stdcall win)) | [] | Vale.SHA.X64.va_quick_Sha_update_bytes_stdcall | {
"file_name": "obj/Vale.SHA.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
ctx_b: Vale.X64.Memory.buffer128 ->
in_b: Vale.X64.Memory.buffer128 ->
num_val: Vale.X64.Memory.nat64 ->
k_b: Vale.X64.Memory.buffer128
-> Vale.X64.QuickCode.va_quickCode Prims.unit (Vale.SHA.X64.va_code_Sha_update_bytes_stdcall win) | {
"end_col": 74,
"end_line": 279,
"start_col": 2,
"start_line": 271
} |
Prims.Tot | val va_req_Sha_update_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
: prop | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Loops",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Agile.Hash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.SHA2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA.SHA_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsSha",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Loops",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Agile.Hash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.SHA2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA.SHA_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsSha",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_req_Sha_update_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (ctx_b:buffer128)
(in_b:buffer128) (num_val:nat64) (k_b:buffer128) : prop =
(va_require_total va_b0 (va_code_Sha_update_bytes_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(ctx_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (num:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (k_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(sha_enabled /\ sse_enabled) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 in_b]))
(ctx_b == in_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(ctx_b == k_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 in_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(in_b == k_b) /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) ctx_ptr ctx_b 2
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr
in_b (4 `op_Multiply` num) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) k_ptr k_b 16 (va_get_mem_layout va_s0) Secret /\ num_val == num /\ in_ptr +
64 `op_Multiply` num < pow2_64 /\ Vale.X64.Decls.buffers_disjoint128 ctx_b in_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 ctx_b == 2 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b == 4 `op_Multiply` num /\
Vale.SHA.SHA_helpers.k_reqs (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) k_b))) | val va_req_Sha_update_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
: prop
let va_req_Sha_update_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
: prop = | false | null | false | (va_require_total va_b0 (va_code_Sha_update_bytes_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let ctx_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0)
in
let in_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0)
in
let num:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0)
in
let k_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0)
in
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(sha_enabled /\ sse_enabled) /\
l_or (Vale.X64.Decls.locs_disjoint ([
Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 ctx_b;
Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 in_b
]))
(ctx_b == in_b) /\
l_or (Vale.X64.Decls.locs_disjoint ([
Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 ctx_b;
Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b
]))
(ctx_b == k_b) /\
l_or (Vale.X64.Decls.locs_disjoint ([
Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 in_b;
Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b
]))
(in_b == k_b) /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
ctx_ptr
ctx_b
2
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in_ptr
in_b
(4 `op_Multiply` num)
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) k_ptr k_b 16 (va_get_mem_layout va_s0) Secret /\
num_val == num /\ in_ptr + 64 `op_Multiply` num < pow2_64 /\
Vale.X64.Decls.buffers_disjoint128 ctx_b in_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 ctx_b == 2 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b == 4 `op_Multiply` num /\
Vale.SHA.SHA_helpers.k_reqs (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) k_b))) | {
"checked_file": "Vale.SHA.X64.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsSha.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.SHA.SHA_helpers.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Spec.SHA2.fsti.checked",
"Spec.Loops.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.SHA.X64.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Prims.bool",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"Prims.l_and",
"Vale.X64.Decls.va_require_total",
"Vale.SHA.X64.va_code_Sha_update_bytes_stdcall",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Vale.X64.CPU_Features_s.sha_enabled",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.l_or",
"Vale.X64.Decls.locs_disjoint",
"Prims.Cons",
"Vale.X64.Memory.loc",
"Vale.X64.Decls.loc_buffer",
"Vale.X64.Memory.vuint128",
"Prims.Nil",
"Vale.X64.Decls.validDstAddrs128",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validSrcAddrs128",
"Prims.op_Multiply",
"Prims.int",
"Prims.op_LessThanOrEqual",
"Prims.op_GreaterThanOrEqual",
"Prims.op_LessThan",
"Vale.Def.Words_s.pow2_64",
"Prims.op_Addition",
"Vale.X64.Machine_s.pow2_64",
"Vale.X64.Decls.buffers_disjoint128",
"Vale.X64.Decls.buffer_length",
"Vale.SHA.SHA_helpers.k_reqs",
"Vale.X64.Decls.buffer128_as_seq",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRdi",
"Prims.prop"
] | [] | module Vale.SHA.X64
open Vale.Def.Opaque_s
open Vale.Def.Types_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open FStar.Seq
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.InsVector
open Vale.X64.InsSha
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.SHA.SHA_helpers
open Spec.SHA2
open Spec.Agile.Hash
open Spec.Hash.Definitions
open Spec.Loops
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 40"
//-- Sha_update_bytes_stdcall
val va_code_Sha_update_bytes_stdcall : win:bool -> Tot va_code
val va_codegen_success_Sha_update_bytes_stdcall : win:bool -> Tot va_pbool
let va_req_Sha_update_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (ctx_b:buffer128) | false | true | Vale.SHA.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 40,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_req_Sha_update_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
: prop | [] | Vale.SHA.X64.va_req_Sha_update_bytes_stdcall | {
"file_name": "obj/Vale.SHA.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
win: Prims.bool ->
ctx_b: Vale.X64.Memory.buffer128 ->
in_b: Vale.X64.Memory.buffer128 ->
num_val: Vale.X64.Memory.nat64 ->
k_b: Vale.X64.Memory.buffer128
-> Prims.prop | {
"end_col": 90,
"end_line": 57,
"start_col": 2,
"start_line": 36
} |
Prims.Tot | val va_ens_Sha_update_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Loops",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Agile.Hash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.SHA2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA.SHA_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsSha",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Loops",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Agile.Hash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.SHA2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA.SHA_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsSha",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_ens_Sha_update_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (ctx_b:buffer128)
(in_b:buffer128) (num_val:nat64) (k_b:buffer128) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Sha_update_bytes_stdcall va_b0 va_s0 win ctx_b in_b num_val k_b /\ va_ensure_total va_b0
va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let (ctx_ptr:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range
0 18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0)
in let (num:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (k_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let hash_in =
Vale.SHA.SHA_helpers.le_bytes_to_hash (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) ctx_b)) in let hash_out =
Vale.SHA.SHA_helpers.le_bytes_to_hash (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_sM) ctx_b)) in (let input_LE =
Vale.Def.Words.Seq_s.seq_nat8_to_seq_uint8 (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_sM) in_b)) in l_and (FStar.Seq.Base.length
#FStar.UInt8.t input_LE `op_Modulus` 64 == 0) (hash_out ==
Vale.SHA.SHA_helpers.update_multi_transparent hash_in input_LE)) /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 ctx_b) (va_get_mem va_s0) (va_get_mem
va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64
rRsi va_sM == va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12
va_s0) /\ (win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64
rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM ==
va_get_reg64 rR12 va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15
va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\ (win
==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==> va_get_xmm 8 va_sM == va_get_xmm 8
va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\ (win ==> va_get_xmm 10 va_sM ==
va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\ (win ==>
va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==> va_get_xmm 13 va_sM == va_get_xmm 13
va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\ (win ==> va_get_xmm 15 va_sM
== va_get_xmm 15 va_s0)) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM (va_update_xmm 12 va_sM
(va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM
va_s0)))))))))))))))))))))))))))))))))))))))) | val va_ens_Sha_update_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
(va_sM: va_state)
(va_fM: va_fuel)
: prop
let va_ens_Sha_update_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
(va_sM: va_state)
(va_fM: va_fuel)
: prop = | false | null | false | (va_req_Sha_update_bytes_stdcall va_b0 va_s0 win ctx_b in_b num_val k_b /\
va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let ctx_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0)
in
let in_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0)
in
let num:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0)
in
let k_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0)
in
let hash_in =
Vale.SHA.SHA_helpers.le_bytes_to_hash (Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq
(va_get_mem va_s0)
ctx_b))
in
let hash_out =
Vale.SHA.SHA_helpers.le_bytes_to_hash (Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq
(va_get_mem va_sM)
ctx_b))
in
(let input_LE =
Vale.Def.Words.Seq_s.seq_nat8_to_seq_uint8 (Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq
(va_get_mem va_sM)
in_b))
in
l_and ((FStar.Seq.Base.length #FStar.UInt8.t input_LE) `op_Modulus` 64 == 0)
(hash_out == Vale.SHA.SHA_helpers.update_multi_transparent hash_in input_LE)) /\
Vale.X64.Decls.modifies_mem (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 ctx_b)
(va_get_mem va_s0)
(va_get_mem va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\
(win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\
(win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\
(~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\
(win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\
(win ==> va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\
(win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\
(win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\
(win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\
(win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\
(win ==> va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\
(win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\
(win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0)) /\
  va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_mem_layout va_sM
  (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM
  (va_update_xmm 13 va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM
  (va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
  (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
  (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
  (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
  (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
  (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM (va_update_reg64 rRdi va_sM
  (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
  (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM
  (va_update_mem va_sM va_s0)))))))))))))))))))))))))))))))))))))))) | {
"checked_file": "Vale.SHA.X64.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsSha.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.SHA.SHA_helpers.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Spec.SHA2.fsti.checked",
"Spec.Loops.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.SHA.X64.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Prims.bool",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"Vale.X64.Decls.va_fuel",
"Prims.l_and",
"Vale.SHA.X64.va_req_Sha_update_bytes_stdcall",
"Vale.X64.Decls.va_ensure_total",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Prims.int",
"Prims.op_Modulus",
"FStar.Seq.Base.length",
"FStar.UInt8.t",
"Vale.SHA.SHA_helpers.hash256",
"Vale.SHA.SHA_helpers.update_multi_transparent",
"FStar.Seq.Base.seq",
"Vale.Def.Words.Seq_s.seq_nat8_to_seq_uint8",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"Vale.X64.Decls.buffer128_as_seq",
"Vale.X64.Decls.va_get_mem",
"Vale.X64.Decls.modifies_mem",
"Vale.X64.Decls.loc_buffer",
"Vale.X64.Memory.vuint128",
"Vale.Def.Types_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Prims.l_imp",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR15",
"Prims.l_not",
"Vale.X64.Decls.quad32",
"Vale.X64.Decls.va_get_xmm",
"Vale.SHA.SHA_helpers.le_bytes_to_hash",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Decls.va_state_eq",
"Vale.X64.Decls.va_update_stackTaint",
"Vale.X64.Decls.va_update_stack",
"Vale.X64.Decls.va_update_mem_layout",
"Vale.X64.Decls.va_update_mem_heaplet",
"Vale.X64.Decls.va_update_flags",
"Vale.X64.Decls.va_update_xmm",
"Vale.X64.Decls.va_update_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rRax",
"Vale.X64.Decls.va_update_ok",
"Vale.X64.Decls.va_update_mem",
"Prims.prop"
] | [] | module Vale.SHA.X64
open Vale.Def.Opaque_s
open Vale.Def.Types_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open FStar.Seq
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.InsVector
open Vale.X64.InsSha
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.SHA.SHA_helpers
open Spec.SHA2
open Spec.Agile.Hash
open Spec.Hash.Definitions
open Spec.Loops
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 40"
//-- Sha_update_bytes_stdcall
val va_code_Sha_update_bytes_stdcall : win:bool -> Tot va_code
val va_codegen_success_Sha_update_bytes_stdcall : win:bool -> Tot va_pbool
let va_req_Sha_update_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (ctx_b:buffer128)
(in_b:buffer128) (num_val:nat64) (k_b:buffer128) : prop =
(va_require_total va_b0 (va_code_Sha_update_bytes_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(ctx_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (num:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (k_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(sha_enabled /\ sse_enabled) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 in_b]))
(ctx_b == in_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(ctx_b == k_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 in_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(in_b == k_b) /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) ctx_ptr ctx_b 2
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr
in_b (4 `op_Multiply` num) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) k_ptr k_b 16 (va_get_mem_layout va_s0) Secret /\ num_val == num /\ in_ptr +
64 `op_Multiply` num < pow2_64 /\ Vale.X64.Decls.buffers_disjoint128 ctx_b in_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 ctx_b == 2 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b == 4 `op_Multiply` num /\
Vale.SHA.SHA_helpers.k_reqs (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) k_b)))
let va_ens_Sha_update_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (ctx_b:buffer128) | false | true | Vale.SHA.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 40,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_ens_Sha_update_bytes_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [] | Vale.SHA.X64.va_ens_Sha_update_bytes_stdcall | {
"file_name": "obj/Vale.SHA.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
win: Prims.bool ->
ctx_b: Vale.X64.Memory.buffer128 ->
in_b: Vale.X64.Memory.buffer128 ->
num_val: Vale.X64.Memory.nat64 ->
k_b: Vale.X64.Memory.buffer128 ->
va_sM: Vale.X64.Decls.va_state ->
va_fM: Vale.X64.Decls.va_fuel
-> Prims.prop | {
"end_col": 49,
"end_line": 104,
"start_col": 2,
"start_line": 60
} |
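The two records above are the precondition (va_req_Sha_update_bytes_stdcall) and postcondition (va_ens_Sha_update_bytes_stdcall) of the Vale SHA update stdcall wrapper. As a reading aid only, here is a hedged sketch of how such a req/ens pair is conventionally bundled into one Hoare-style Ghost signature; the name sha_update_bytes_stdcall_contract is hypothetical and not part of the dataset, while the requires/ensures shape mirrors the val va_lemma_Sha_update_bytes_stdcall declaration visible in the file_context field of the next record.

val sha_update_bytes_stdcall_contract : va_b0:va_code -> va_s0:va_state -> win:bool ->
  ctx_b:buffer128 -> in_b:buffer128 -> num_val:nat64 -> k_b:buffer128
  -> Ghost (va_state & va_fuel)
    (requires (va_req_Sha_update_bytes_stdcall va_b0 va_s0 win ctx_b in_b num_val k_b))
    (ensures (fun (va_sM, va_fM) -> va_ens_Sha_update_bytes_stdcall va_b0 va_s0 win ctx_b in_b num_val k_b va_sM va_fM))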
Prims.Tot | val va_wp_Sha_update_bytes_stdcall
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Loops",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Agile.Hash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.SHA2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA.SHA_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsSha",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Loops",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Agile.Hash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.SHA2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA.SHA_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsSha",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.SHA",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Sha_update_bytes_stdcall (win:bool) (ctx_b:buffer128) (in_b:buffer128) (num_val:nat64)
(k_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (ctx_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in let (num:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let (k_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64
rRcx va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (sha_enabled /\
sse_enabled) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 in_b]))
(ctx_b == in_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(ctx_b == k_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 in_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(in_b == k_b) /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) ctx_ptr ctx_b 2
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr
in_b (4 `op_Multiply` num) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) k_ptr k_b 16 (va_get_mem_layout va_s0) Secret /\ num_val == num /\ in_ptr +
64 `op_Multiply` num < pow2_64 /\ Vale.X64.Decls.buffers_disjoint128 ctx_b in_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 ctx_b == 2 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b == 4 `op_Multiply` num /\
Vale.SHA.SHA_helpers.k_reqs (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) k_b)) /\
(forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rcx:nat64)
(va_x_rdx:nat64) (va_x_rsi:nat64) (va_x_rdi:nat64) (va_x_rbp:nat64) (va_x_rsp:nat64)
(va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_r12:nat64)
(va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)
(va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)
(va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32) (va_x_xmm10:quad32)
(va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32) (va_x_xmm14:quad32)
(va_x_xmm15:quad32) (va_x_efl:Vale.X64.Flags.t) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_stack:vale_stack) (va_x_stackTaint:memtaint) . let
va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack va_x_stack (va_upd_mem_layout
va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags va_x_efl (va_upd_xmm 15
va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12
(va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8
va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4
va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0
va_x_xmm0 (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13
(va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10
(va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64
rRbp va_x_rbp (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdx
va_x_rdx (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax
(va_upd_mem va_x_mem va_s0))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\ (let
(ctx_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0)
(fun _ -> va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range 0 18446744073709551615)) =
va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let
(num:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun
_ -> va_get_reg64 rRdx va_s0) in let (k_ptr:(va_int_range 0 18446744073709551615)) = va_if win
(fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0) in let hash_in =
Vale.SHA.SHA_helpers.le_bytes_to_hash (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) ctx_b)) in let hash_out =
Vale.SHA.SHA_helpers.le_bytes_to_hash (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_sM) ctx_b)) in (let input_LE =
Vale.Def.Words.Seq_s.seq_nat8_to_seq_uint8 (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_sM) in_b)) in l_and (FStar.Seq.Base.length
#FStar.UInt8.t input_LE `op_Modulus` 64 == 0) (hash_out ==
Vale.SHA.SHA_helpers.update_multi_transparent hash_in input_LE)) /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 ctx_b) (va_get_mem va_s0) (va_get_mem
va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64
rRsi va_sM == va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12
va_s0) /\ (win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64
rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM ==
va_get_reg64 rR12 va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15
va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\ (win
==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==> va_get_xmm 8 va_sM == va_get_xmm 8
va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\ (win ==> va_get_xmm 10 va_sM ==
va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\ (win ==>
va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==> va_get_xmm 13 va_sM == va_get_xmm 13
va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\ (win ==> va_get_xmm 15 va_sM
== va_get_xmm 15 va_s0)) ==> va_k va_sM (()))) | val va_wp_Sha_update_bytes_stdcall
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Sha_update_bytes_stdcall
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(let ctx_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0)
in
let in_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0)
in
let num:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0)
in
let k_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0)
in
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(sha_enabled /\ sse_enabled) /\
l_or (Vale.X64.Decls.locs_disjoint ([
Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 ctx_b;
Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 in_b
]))
(ctx_b == in_b) /\
l_or (Vale.X64.Decls.locs_disjoint ([
Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 ctx_b;
Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b
]))
(ctx_b == k_b) /\
l_or (Vale.X64.Decls.locs_disjoint ([
Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 in_b;
Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b
]))
(in_b == k_b) /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
ctx_ptr
ctx_b
2
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in_ptr
in_b
(4 `op_Multiply` num)
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) k_ptr k_b 16 (va_get_mem_layout va_s0) Secret /\
num_val == num /\ in_ptr + 64 `op_Multiply` num < pow2_64 /\
Vale.X64.Decls.buffers_disjoint128 ctx_b in_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 ctx_b == 2 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b == 4 `op_Multiply` num /\
Vale.SHA.SHA_helpers.k_reqs (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) k_b)) /\
(forall (va_x_mem: vale_heap) (va_x_rax: nat64) (va_x_rbx: nat64) (va_x_rcx: nat64)
(va_x_rdx: nat64) (va_x_rsi: nat64) (va_x_rdi: nat64) (va_x_rbp: nat64) (va_x_rsp: nat64)
(va_x_r8: nat64) (va_x_r9: nat64) (va_x_r10: nat64) (va_x_r11: nat64) (va_x_r12: nat64)
(va_x_r13: nat64) (va_x_r14: nat64) (va_x_r15: nat64) (va_x_xmm0: quad32) (va_x_xmm1: quad32)
(va_x_xmm2: quad32) (va_x_xmm3: quad32) (va_x_xmm4: quad32) (va_x_xmm5: quad32)
(va_x_xmm6: quad32) (va_x_xmm7: quad32) (va_x_xmm8: quad32) (va_x_xmm9: quad32)
(va_x_xmm10: quad32) (va_x_xmm11: quad32) (va_x_xmm12: quad32) (va_x_xmm13: quad32)
(va_x_xmm14: quad32) (va_x_xmm15: quad32) (va_x_efl: Vale.X64.Flags.t) (va_x_heap0: vale_heap)
(va_x_memLayout: vale_heap_layout) (va_x_stack: vale_stack) (va_x_stackTaint: memtaint).
let va_sM =
        va_upd_stackTaint va_x_stackTaint (va_upd_stack va_x_stack (va_upd_mem_layout va_x_memLayout
        (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags va_x_efl (va_upd_xmm 15 va_x_xmm15
        (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12
        (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9
        (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5
        (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
        (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14
        (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rR11 va_x_r11
        (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
        (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRdi va_x_rdi
        (va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx
        (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax
        (va_upd_mem va_x_mem va_s0)))))))))))))))))))))))))))))))))))))
in
va_get_ok va_sM /\
(let ctx_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0)
in
let in_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0)
in
let num:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0)
in
let k_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0)
in
let hash_in =
Vale.SHA.SHA_helpers.le_bytes_to_hash (Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq
(va_get_mem va_s0)
ctx_b))
in
let hash_out =
Vale.SHA.SHA_helpers.le_bytes_to_hash (Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq
(va_get_mem va_sM)
ctx_b))
in
(let input_LE =
Vale.Def.Words.Seq_s.seq_nat8_to_seq_uint8 (Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq
(va_get_mem va_sM)
in_b))
in
l_and ((FStar.Seq.Base.length #FStar.UInt8.t input_LE) `op_Modulus` 64 == 0)
(hash_out == Vale.SHA.SHA_helpers.update_multi_transparent hash_in input_LE)) /\
Vale.X64.Decls.modifies_mem (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 ctx_b)
(va_get_mem va_s0)
(va_get_mem va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\
(win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\
(win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\
(~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\
(win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\
(win ==> va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\
(win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\
(win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\
(win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\
(win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\
(win ==> va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\
(win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\
(win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0)) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.SHA.X64.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsSha.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.SHA.SHA_helpers.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Spec.SHA2.fsti.checked",
"Spec.Loops.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.SHA.X64.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Vale.X64.CPU_Features_s.sha_enabled",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.l_or",
"Vale.X64.Decls.locs_disjoint",
"Prims.Cons",
"Vale.X64.Memory.loc",
"Vale.X64.Decls.loc_buffer",
"Vale.X64.Memory.vuint128",
"Prims.Nil",
"Vale.X64.Decls.validDstAddrs128",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validSrcAddrs128",
"Prims.op_Multiply",
"Prims.int",
"Prims.op_LessThanOrEqual",
"Prims.op_GreaterThanOrEqual",
"Prims.op_LessThan",
"Vale.Def.Words_s.pow2_64",
"Prims.op_Addition",
"Vale.X64.Machine_s.pow2_64",
"Vale.X64.Decls.buffers_disjoint128",
"Vale.X64.Decls.buffer_length",
"Vale.SHA.SHA_helpers.k_reqs",
"Vale.X64.Decls.buffer128_as_seq",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Decls.va_if",
"Vale.Def.Types_s.nat64",
"Vale.X64.Machine_s.rR9",
"Prims.l_not",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRdi",
"Prims.l_Forall",
"Vale.X64.InsBasic.vale_heap",
"Vale.X64.Decls.quad32",
"Vale.X64.Flags.t",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Vale.X64.InsBasic.vale_stack",
"Vale.X64.Memory.memtaint",
"Prims.l_imp",
"Prims.op_Modulus",
"FStar.Seq.Base.length",
"FStar.UInt8.t",
"Vale.SHA.SHA_helpers.hash256",
"Vale.SHA.SHA_helpers.update_multi_transparent",
"FStar.Seq.Base.seq",
"Vale.Def.Words.Seq_s.seq_nat8_to_seq_uint8",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"Vale.X64.Decls.modifies_mem",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Decls.va_get_xmm",
"Vale.SHA.SHA_helpers.le_bytes_to_hash",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_stackTaint",
"Vale.X64.Decls.va_upd_stack",
"Vale.X64.Decls.va_upd_mem_layout",
"Vale.X64.Decls.va_upd_mem_heaplet",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_xmm",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rRax",
"Vale.X64.Decls.va_upd_mem"
] | [] | module Vale.SHA.X64
open Vale.Def.Opaque_s
open Vale.Def.Types_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open FStar.Seq
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.InsVector
open Vale.X64.InsSha
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.SHA.SHA_helpers
open Spec.SHA2
open Spec.Agile.Hash
open Spec.Hash.Definitions
open Spec.Loops
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
#reset-options "--z3rlimit 40"
//-- Sha_update_bytes_stdcall
val va_code_Sha_update_bytes_stdcall : win:bool -> Tot va_code
val va_codegen_success_Sha_update_bytes_stdcall : win:bool -> Tot va_pbool
let va_req_Sha_update_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (ctx_b:buffer128)
(in_b:buffer128) (num_val:nat64) (k_b:buffer128) : prop =
(va_require_total va_b0 (va_code_Sha_update_bytes_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(ctx_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (num:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (k_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(sha_enabled /\ sse_enabled) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 in_b]))
(ctx_b == in_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(ctx_b == k_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 in_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(in_b == k_b) /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) ctx_ptr ctx_b 2
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr
in_b (4 `op_Multiply` num) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) k_ptr k_b 16 (va_get_mem_layout va_s0) Secret /\ num_val == num /\ in_ptr +
64 `op_Multiply` num < pow2_64 /\ Vale.X64.Decls.buffers_disjoint128 ctx_b in_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 ctx_b == 2 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b == 4 `op_Multiply` num /\
Vale.SHA.SHA_helpers.k_reqs (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) k_b)))
let va_ens_Sha_update_bytes_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (ctx_b:buffer128)
(in_b:buffer128) (num_val:nat64) (k_b:buffer128) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Sha_update_bytes_stdcall va_b0 va_s0 win ctx_b in_b num_val k_b /\ va_ensure_total va_b0
va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let (ctx_ptr:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range
0 18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0)
in let (num:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (k_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let hash_in =
Vale.SHA.SHA_helpers.le_bytes_to_hash (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) ctx_b)) in let hash_out =
Vale.SHA.SHA_helpers.le_bytes_to_hash (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_sM) ctx_b)) in (let input_LE =
Vale.Def.Words.Seq_s.seq_nat8_to_seq_uint8 (Vale.Def.Types_s.le_seq_quad32_to_bytes
(Vale.X64.Decls.buffer128_as_seq (va_get_mem va_sM) in_b)) in l_and (FStar.Seq.Base.length
#FStar.UInt8.t input_LE `op_Modulus` 64 == 0) (hash_out ==
Vale.SHA.SHA_helpers.update_multi_transparent hash_in input_LE)) /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 ctx_b) (va_get_mem va_s0) (va_get_mem
va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64
rRsi va_sM == va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12
va_s0) /\ (win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64
rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM ==
va_get_reg64 rR12 va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15
va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\ (win
==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==> va_get_xmm 8 va_sM == va_get_xmm 8
va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\ (win ==> va_get_xmm 10 va_sM ==
va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\ (win ==>
va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==> va_get_xmm 13 va_sM == va_get_xmm 13
va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\ (win ==> va_get_xmm 15 va_sM
== va_get_xmm 15 va_s0)) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM (va_update_xmm 12 va_sM
(va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM
va_s0))))))))))))))))))))))))))))))))))))))))
val va_lemma_Sha_update_bytes_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool ->
ctx_b:buffer128 -> in_b:buffer128 -> num_val:nat64 -> k_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Sha_update_bytes_stdcall win) va_s0 /\ va_get_ok va_s0
/\ (let (ctx_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (num:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (k_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(sha_enabled /\ sse_enabled) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 in_b]))
(ctx_b == in_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 ctx_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(ctx_b == k_b) /\ l_or (Vale.X64.Decls.locs_disjoint ([Vale.X64.Decls.loc_buffer
#Vale.X64.Memory.vuint128 in_b; Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 k_b]))
(in_b == k_b) /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) ctx_ptr ctx_b 2
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in_ptr
in_b (4 `op_Multiply` num) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128
(va_get_mem va_s0) k_ptr k_b 16 (va_get_mem_layout va_s0) Secret /\ num_val == num /\ in_ptr +
64 `op_Multiply` num < pow2_64 /\ Vale.X64.Decls.buffers_disjoint128 ctx_b in_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 ctx_b == 2 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b == 4 `op_Multiply` num /\
Vale.SHA.SHA_helpers.k_reqs (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) k_b))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (ctx_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (in_ptr:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (num:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (k_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let hash_in = Vale.SHA.SHA_helpers.le_bytes_to_hash
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0)
ctx_b)) in let hash_out = Vale.SHA.SHA_helpers.le_bytes_to_hash
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_sM)
ctx_b)) in (let input_LE = Vale.Def.Words.Seq_s.seq_nat8_to_seq_uint8
(Vale.Def.Types_s.le_seq_quad32_to_bytes (Vale.X64.Decls.buffer128_as_seq (va_get_mem va_sM)
in_b)) in l_and (FStar.Seq.Base.length #FStar.UInt8.t input_LE `op_Modulus` 64 == 0) (hash_out
== Vale.SHA.SHA_helpers.update_multi_transparent hash_in input_LE)) /\
Vale.X64.Decls.modifies_mem (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 ctx_b)
(va_get_mem va_s0) (va_get_mem va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\
(win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (win ==> va_get_reg64 rRbp
va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi
va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64
rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64
rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==>
va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6 va_sM ==
va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0)) /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_stack va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_xmm 15 va_sM (va_update_xmm 14
va_sM (va_update_xmm 13 va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10
va_sM (va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6
va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2
va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM
(va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Sha_update_bytes_stdcall (win:bool) (ctx_b:buffer128) (in_b:buffer128) (num_val:nat64) | false | true | Vale.SHA.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 40,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Sha_update_bytes_stdcall
(win: bool)
(ctx_b in_b: buffer128)
(num_val: nat64)
(k_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.SHA.X64.va_wp_Sha_update_bytes_stdcall | {
"file_name": "obj/Vale.SHA.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
ctx_b: Vale.X64.Memory.buffer128 ->
in_b: Vale.X64.Memory.buffer128 ->
num_val: Vale.X64.Memory.nat64 ->
k_b: Vale.X64.Memory.buffer128 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 50,
"end_line": 252,
"start_col": 2,
"start_line": 178
} |
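The record above is the weakest-precondition form of the same stdcall: va_wp_Sha_update_bytes_stdcall takes a continuation va_k over the final state and yields a Type0 obligation over the initial state. As a usage illustration only (not part of the dataset), the hedged sketch below instantiates that predicate transformer with a concrete continuation; the names k_rsp_preserved and sha_update_wp_instance are hypothetical, and only identifiers already present in the record are relied upon.

let k_rsp_preserved (va_s0:va_state) : va_state -> unit -> Type0 =
  (* continuation asking that rsp is restored in the final state *)
  fun (va_sM:va_state) (_:unit) -> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0

let sha_update_wp_instance (win:bool) (ctx_b in_b:buffer128) (num_val:nat64) (k_b:buffer128)
  (va_s0:va_state) : Type0 =
  (* the proof obligation a caller would discharge to conclude k_rsp_preserved after the call *)
  va_wp_Sha_update_bytes_stdcall win ctx_b in_b num_val k_b va_s0 (k_rsp_preserved va_s0)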
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tuint64 = TD_Base TUInt64 | let tuint64 = | false | null | false | TD_Base TUInt64 | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"Vale.Interop.Base.TD_Base",
"Vale.Arch.HeapTypes_s.TUInt64"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret}) | false | true | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tuint64 : Vale.Interop.Base.td | [] | Vale.Stdcalls.X64.GCMencryptOpt.tuint64 | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Vale.Interop.Base.td | {
"end_col": 29,
"end_line": 45,
"start_col": 14,
"start_line": 45
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let b128 = buf_t TUInt8 TUInt128 | let b128 = | false | null | false | buf_t TUInt8 TUInt128 | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"Vale.Interop.Base.buf_t",
"Vale.Arch.HeapTypes_s.TUInt8",
"Vale.Arch.HeapTypes_s.TUInt128"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x | false | true | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val b128 : Type0 | [] | Vale.Stdcalls.X64.GCMencryptOpt.b128 | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 32,
"end_line": 37,
"start_col": 11,
"start_line": 37
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq | let t128_mod = | false | null | false | TD_Buffer TUInt8 TUInt128 default_bq | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"Vale.Interop.Base.TD_Buffer",
"Vale.Arch.HeapTypes_s.TUInt8",
"Vale.Arch.HeapTypes_s.TUInt128",
"Vale.Interop.Base.default_bq"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128 | false | true | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val t128_mod : Vale.Interop.Base.td | [] | Vale.Stdcalls.X64.GCMencryptOpt.t128_mod | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Vale.Interop.Base.td | {
"end_col": 51,
"end_line": 39,
"start_col": 15,
"start_line": 39
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret}) | let t128_no_mod = | false | null | false | TD_Buffer TUInt8 TUInt128 ({ modified = false; strict_disjointness = false; taint = MS.Secret }) | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"Vale.Interop.Base.TD_Buffer",
"Vale.Arch.HeapTypes_s.TUInt8",
"Vale.Arch.HeapTypes_s.TUInt128",
"Vale.Interop.Base.Mkbuffer_qualifiers",
"Vale.Arch.HeapTypes_s.Secret"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public}) | false | true | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val t128_no_mod : Vale.Interop.Base.td | [] | Vale.Stdcalls.X64.GCMencryptOpt.t128_no_mod | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Vale.Interop.Base.td | {
"end_col": 106,
"end_line": 43,
"start_col": 18,
"start_line": 43
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let uint64 = UInt64.t | let uint64 = | false | null | false | UInt64.t | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"FStar.UInt64.t"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s | false | true | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val uint64 : Prims.eqtype | [] | Vale.Stdcalls.X64.GCMencryptOpt.uint64 | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Prims.eqtype | {
"end_col": 21,
"end_line": 28,
"start_col": 13,
"start_line": 28
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public}) | let t128_mod_pub = | false | null | false | TD_Buffer TUInt8 TUInt128 ({ modified = true; strict_disjointness = false; taint = MS.Public }) | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"Vale.Interop.Base.TD_Buffer",
"Vale.Arch.HeapTypes_s.TUInt8",
"Vale.Arch.HeapTypes_s.TUInt128",
"Vale.Interop.Base.Mkbuffer_qualifiers",
"Vale.Arch.HeapTypes_s.Public"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq | false | true | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val t128_mod_pub : Vale.Interop.Base.td | [] | Vale.Stdcalls.X64.GCMencryptOpt.t128_mod_pub | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Vale.Interop.Base.td | {
"end_col": 106,
"end_line": 41,
"start_col": 19,
"start_line": 41
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let code_gcm128 = GC.va_code_Gcm_blocks_stdcall IA.win AES_128 | let code_gcm128 = | false | null | false | GC.va_code_Gcm_blocks_stdcall IA.win AES_128 | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"Vale.AES.X64.GCMencryptOpt.va_code_Gcm_blocks_stdcall",
"Vale.Interop.Assumptions.win",
"Vale.AES.AES_common_s.AES_128"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm128_lemma' has the required type *)
noextract
let gcm128_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm128_pre s iv) (gcm128_post s iv)) (gcm128_lemma' s iv) | false | true | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val code_gcm128 : Vale.X64.Decls.va_code | [] | Vale.Stdcalls.X64.GCMencryptOpt.code_gcm128 | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Vale.X64.Decls.va_code | {
"end_col": 62,
"end_line": 203,
"start_col": 18,
"start_line": 203
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let code_gcm256 = GC.va_code_Gcm_blocks_stdcall IA.win AES_256 | let code_gcm256 = | false | null | false | GC.va_code_Gcm_blocks_stdcall IA.win AES_256 | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"Vale.AES.X64.GCMencryptOpt.va_code_Gcm_blocks_stdcall",
"Vale.Interop.Assumptions.win",
"Vale.AES.AES_common_s.AES_256"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm128_lemma' has the required type *)
noextract
let gcm128_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm128_pre s iv) (gcm128_post s iv)) (gcm128_lemma' s iv)
noextract
let code_gcm128 = GC.va_code_Gcm_blocks_stdcall IA.win AES_128
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract
let lowstar_gcm128_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm128
dom
[]
_
_
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* And here's the gcm wrapper itself *)
noextract
let lowstar_gcm128 (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) : lowstar_gcm128_t s iv =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.wrap_weak_stdcall
code_gcm128
dom
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm256_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm256_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm256_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm256_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm256_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
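(* As in the AES-128 case, the buffer_writeable_reveal calls below expose the
   Low*-to-Vale buffer correspondence used to conclude the ME.buffer_writeable
   postconditions for each buffer argument. *)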
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm256_lemma' has the required type *)
noextract
let gcm256_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm256_pre s iv) (gcm256_post s iv)) (gcm256_lemma' s iv) | false | true | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val code_gcm256 : Vale.X64.Decls.va_code | [] | Vale.Stdcalls.X64.GCMencryptOpt.code_gcm256 | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Vale.X64.Decls.va_code | {
"end_col": 62,
"end_line": 375,
"start_col": 18,
"start_line": 375
} |
|
Prims.Tot | val as_normal_t (#a: Type) (x: a) : normal a | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let as_normal_t (#a:Type) (x:a) : normal a = x | val as_normal_t (#a: Type) (x: a) : normal a
let as_normal_t (#a: Type) (x: a) : normal a = | false | null | false | x | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"Vale.Interop.Base.normal"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x | false | false | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val as_normal_t (#a: Type) (x: a) : normal a | [] | Vale.Stdcalls.X64.GCMencryptOpt.as_normal_t | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: a -> Vale.Interop.Base.normal a | {
"end_col": 46,
"end_line": 34,
"start_col": 45,
"start_line": 34
} |
Prims.Tot | val as_t (#a: Type) (x: normal a) : a | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let as_t (#a:Type) (x:normal a) : a = x | val as_t (#a: Type) (x: normal a) : a
let as_t (#a: Type) (x: normal a) : a = | false | null | false | x | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"Vale.Interop.Base.normal"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *) | false | false | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val as_t (#a: Type) (x: normal a) : a | [] | Vale.Stdcalls.X64.GCMencryptOpt.as_t | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Vale.Interop.Base.normal a -> a | {
"end_col": 39,
"end_line": 32,
"start_col": 38,
"start_line": 32
} |
Prims.Tot | val dom:dom: list td {List.length dom <= 20} | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y | val dom:dom: list td {List.length dom <= 20}
let dom:dom: list td {List.length dom <= 20} = | false | null | false | let y =
[
t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod; t128_no_mod; t128_no_mod;
t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod
]
in
assert_norm (List.length y = 17);
y | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.b2t",
"Prims.op_Equality",
"Prims.int",
"FStar.List.Tot.Base.length",
"Vale.Interop.Base.td",
"Prims.list",
"Prims.Cons",
"Vale.Stdcalls.X64.GCMencryptOpt.t128_no_mod",
"Vale.Stdcalls.X64.GCMencryptOpt.tuint64",
"Vale.Stdcalls.X64.GCMencryptOpt.t128_mod_pub",
"Vale.Stdcalls.X64.GCMencryptOpt.t128_mod",
"Prims.Nil"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64 | false | false | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val dom:dom: list td {List.length dom <= 20} | [] | Vale.Stdcalls.X64.GCMencryptOpt.dom | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | dom: Prims.list Vale.Interop.Base.td {FStar.List.Tot.Base.length dom <= 20} | {
"end_col": 3,
"end_line": 52,
"start_col": 43,
"start_line": 48
} |
Prims.Tot | val gcm128_pre: (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_pre dom | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s) | val gcm128_pre: (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_pre dom
let gcm128_pre: (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_pre dom = | false | null | false | fun
(s: Ghost.erased (Seq.seq nat32))
(iv: Ghost.erased supported_iv_LE)
(c: V.va_code)
(auth_b: b128)
(auth_bytes: uint64)
(auth_num: uint64)
(keys_b: b128)
(iv_b: b128)
(hkeys_b: b128)
(abytes_b: b128)
(in128x6_b: b128)
(out128x6_b: b128)
(len128x6_num: uint64)
(in128_b: b128)
(out128_b: b128)
(len128_num: uint64)
(inout_b: b128)
(plain_num: uint64)
(scratch_b: b128)
(tag_b: b128)
(va_s0: V.va_state)
->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128 (as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b) (as_vale_buffer iv_b) (Ghost.reveal iv)
(as_vale_buffer hkeys_b) (as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num) (as_vale_buffer in128_b)
(as_vale_buffer out128_b) (UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s) | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.X64.Decls.va_code",
"Vale.Stdcalls.X64.GCMencryptOpt.b128",
"Vale.Stdcalls.X64.GCMencryptOpt.uint64",
"Vale.X64.Decls.va_state",
"Vale.AES.X64.GCMencryptOpt.va_req_Gcm_blocks_stdcall",
"Vale.Interop.Assumptions.win",
"Vale.AES.AES_common_s.AES_128",
"Vale.X64.MemoryAdapters.as_vale_buffer",
"Vale.Arch.HeapTypes_s.TUInt8",
"Vale.Arch.HeapTypes_s.TUInt128",
"FStar.UInt64.v",
"FStar.Ghost.reveal",
"Prims.prop"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract | false | true | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm128_pre: (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_pre dom | [] | Vale.Stdcalls.X64.GCMencryptOpt.gcm128_pre | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Words_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Vale.AsLowStar.ValeSig.vale_pre Vale.Stdcalls.X64.GCMencryptOpt.dom | {
"end_col": 74,
"end_line": 87,
"start_col": 2,
"start_line": 57
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lowstar_gcm128_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm128
dom
[]
_
_
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win)) | let lowstar_gcm128_t (s: Ghost.erased (Seq.seq nat32)) (iv: Ghost.erased supported_iv_LE) = | false | null | false | assert_norm (List.length dom + List.length ([] <: list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall code_gcm128
dom
[]
_
_
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win)) | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.Interop.X64.as_lowstar_sig_t_weak_stdcall",
"Vale.Stdcalls.X64.GCMencryptOpt.code_gcm128",
"Vale.Stdcalls.X64.GCMencryptOpt.dom",
"Prims.Nil",
"Vale.Interop.Base.arg",
"Vale.AsLowStar.Wrapper.pre_rel_generic",
"Vale.Interop.X64.max_stdcall",
"Vale.Interop.X64.arg_reg_stdcall",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm128_pre",
"Vale.AsLowStar.Wrapper.post_rel_generic",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm128_post",
"Vale.AsLowStar.Wrapper.mk_prediction",
"Vale.Interop.X64.regs_modified_stdcall",
"Vale.Interop.X64.xmms_modified_stdcall",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm128_lemma",
"Vale.Interop.Assumptions.win",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.List.Tot.Base.length",
"Vale.Interop.Base.td",
"Prims.list"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm128_lemma' has the required type *)
noextract
let gcm128_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm128_pre s iv) (gcm128_post s iv)) (gcm128_lemma' s iv)
noextract
let code_gcm128 = GC.va_code_Gcm_blocks_stdcall IA.win AES_128
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract | false | true | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lowstar_gcm128_t : s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Type0 | [] | Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm128_t | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Type0 | {
"end_col": 81,
"end_line": 215,
"start_col": 2,
"start_line": 208
} |
|
Prims.Tot | val lowstar_gcm256 (s: Ghost.erased (Seq.seq nat32)) (iv: Ghost.erased supported_iv_LE)
: lowstar_gcm256_t s iv | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lowstar_gcm256 (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) : lowstar_gcm256_t s iv =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.wrap_weak_stdcall
code_gcm256
dom
(W.mk_prediction code_gcm256 dom [] ((gcm256_lemma s iv) code_gcm256 IA.win)) | val lowstar_gcm256 (s: Ghost.erased (Seq.seq nat32)) (iv: Ghost.erased supported_iv_LE)
: lowstar_gcm256_t s iv
let lowstar_gcm256 (s: Ghost.erased (Seq.seq nat32)) (iv: Ghost.erased supported_iv_LE)
: lowstar_gcm256_t s iv = | false | null | false | assert_norm (List.length dom + List.length ([] <: list arg) <= 20);
IX64.wrap_weak_stdcall code_gcm256
dom
(W.mk_prediction code_gcm256 dom [] ((gcm256_lemma s iv) code_gcm256 IA.win)) | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.Interop.X64.wrap_weak_stdcall",
"Vale.Stdcalls.X64.GCMencryptOpt.code_gcm256",
"Vale.Stdcalls.X64.GCMencryptOpt.dom",
"Vale.AsLowStar.Wrapper.pre_rel_generic",
"Vale.Interop.X64.max_stdcall",
"Vale.Interop.X64.arg_reg_stdcall",
"Prims.Nil",
"Vale.Interop.Base.arg",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm256_pre",
"Vale.AsLowStar.Wrapper.post_rel_generic",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm256_post",
"Vale.AsLowStar.Wrapper.mk_prediction",
"Vale.Interop.X64.regs_modified_stdcall",
"Vale.Interop.X64.xmms_modified_stdcall",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm256_lemma",
"Vale.Interop.Assumptions.win",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.List.Tot.Base.length",
"Vale.Interop.Base.td",
"Prims.list",
"Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm256_t"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm128_lemma' has the required type *)
noextract
let gcm128_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm128_pre s iv) (gcm128_post s iv)) (gcm128_lemma' s iv)
noextract
let code_gcm128 = GC.va_code_Gcm_blocks_stdcall IA.win AES_128
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract
let lowstar_gcm128_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm128
dom
[]
_
_
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* And here's the gcm wrapper itself *)
noextract
let lowstar_gcm128 (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) : lowstar_gcm128_t s iv =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.wrap_weak_stdcall
code_gcm128
dom
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
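(* IX64.wrap_weak_stdcall turns code_gcm128 and its prediction into the actual
   Low*-callable wrapper of type lowstar_gcm128_t s iv, whose specification is the
   one induced by gcm128_pre and gcm128_post. The AES-256 definitions below repeat
   the same construction, swapping AES_128 for AES_256. *)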
(* Need to rearrange the order of arguments (same adaptation as for AES-128 above, now for AES-256) *)
[@__reduce__] noextract
let gcm256_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
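(* gcm256_pre is identical to gcm128_pre except that the Vale requirement is
   instantiated with AES_256 instead of AES_128. *)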
[@__reduce__] noextract
let gcm256_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
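(* Likewise, gcm256_post mirrors gcm128_post with the algorithm fixed to AES_256. *)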
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm256_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm256_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm256_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
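  (* As in gcm128_lemma', reveal writeability of every (TUInt8, TUInt128) buffer to
     discharge the ME.buffer_writeable conjuncts of the postcondition. *)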
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm256_lemma' has the required type *)
noextract
let gcm256_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
  as_t #(VSig.vale_sig_stdcall (gcm256_pre s iv) (gcm256_post s iv)) (gcm256_lemma' s iv)
noextract
let code_gcm256 = GC.va_code_Gcm_blocks_stdcall IA.win AES_256
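(* The Vale code value for the same stdcall procedure, now instantiated with AES_256. *)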
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract
let lowstar_gcm256_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm256
dom
[]
_
_
(W.mk_prediction code_gcm256 dom [] ((gcm256_lemma s iv) code_gcm256 IA.win))
(* And here's the gcm wrapper itself *)
noextract | false | false | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lowstar_gcm256 (s: Ghost.erased (Seq.seq nat32)) (iv: Ghost.erased supported_iv_LE)
: lowstar_gcm256_t s iv | [] | Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm256 | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm256_t s iv | {
"end_col": 81,
"end_line": 396,
"start_col": 2,
"start_line": 392
} |
Prims.Tot | val lowstar_gcm128 (s: Ghost.erased (Seq.seq nat32)) (iv: Ghost.erased supported_iv_LE)
: lowstar_gcm128_t s iv | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lowstar_gcm128 (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) : lowstar_gcm128_t s iv =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.wrap_weak_stdcall
code_gcm128
dom
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win)) | val lowstar_gcm128 (s: Ghost.erased (Seq.seq nat32)) (iv: Ghost.erased supported_iv_LE)
: lowstar_gcm128_t s iv
let lowstar_gcm128 (s: Ghost.erased (Seq.seq nat32)) (iv: Ghost.erased supported_iv_LE)
: lowstar_gcm128_t s iv = | false | null | false | assert_norm (List.length dom + List.length ([] <: list arg) <= 20);
IX64.wrap_weak_stdcall code_gcm128
dom
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win)) | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.Interop.X64.wrap_weak_stdcall",
"Vale.Stdcalls.X64.GCMencryptOpt.code_gcm128",
"Vale.Stdcalls.X64.GCMencryptOpt.dom",
"Vale.AsLowStar.Wrapper.pre_rel_generic",
"Vale.Interop.X64.max_stdcall",
"Vale.Interop.X64.arg_reg_stdcall",
"Prims.Nil",
"Vale.Interop.Base.arg",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm128_pre",
"Vale.AsLowStar.Wrapper.post_rel_generic",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm128_post",
"Vale.AsLowStar.Wrapper.mk_prediction",
"Vale.Interop.X64.regs_modified_stdcall",
"Vale.Interop.X64.xmms_modified_stdcall",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm128_lemma",
"Vale.Interop.Assumptions.win",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.List.Tot.Base.length",
"Vale.Interop.Base.td",
"Prims.list",
"Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm128_t"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm128_lemma' has the required type *)
noextract
let gcm128_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm128_pre s iv) (gcm128_post s iv)) (gcm128_lemma' s iv)
noextract
let code_gcm128 = GC.va_code_Gcm_blocks_stdcall IA.win AES_128
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract
let lowstar_gcm128_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm128
dom
[]
_
_
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* And here's the gcm wrapper itself *)
noextract | false | false | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lowstar_gcm128 (s: Ghost.erased (Seq.seq nat32)) (iv: Ghost.erased supported_iv_LE)
: lowstar_gcm128_t s iv | [] | Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm128 | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm128_t s iv | {
"end_col": 81,
"end_line": 224,
"start_col": 2,
"start_line": 220
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lowstar_gcm256_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm256
dom
[]
_
_
(W.mk_prediction code_gcm256 dom [] ((gcm256_lemma s iv) code_gcm256 IA.win)) | let lowstar_gcm256_t (s: Ghost.erased (Seq.seq nat32)) (iv: Ghost.erased supported_iv_LE) = | false | null | false | assert_norm (List.length dom + List.length ([] <: list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall code_gcm256
dom
[]
_
_
(W.mk_prediction code_gcm256 dom [] ((gcm256_lemma s iv) code_gcm256 IA.win)) | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.Interop.X64.as_lowstar_sig_t_weak_stdcall",
"Vale.Stdcalls.X64.GCMencryptOpt.code_gcm256",
"Vale.Stdcalls.X64.GCMencryptOpt.dom",
"Prims.Nil",
"Vale.Interop.Base.arg",
"Vale.AsLowStar.Wrapper.pre_rel_generic",
"Vale.Interop.X64.max_stdcall",
"Vale.Interop.X64.arg_reg_stdcall",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm256_pre",
"Vale.AsLowStar.Wrapper.post_rel_generic",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm256_post",
"Vale.AsLowStar.Wrapper.mk_prediction",
"Vale.Interop.X64.regs_modified_stdcall",
"Vale.Interop.X64.xmms_modified_stdcall",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm256_lemma",
"Vale.Interop.Assumptions.win",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.List.Tot.Base.length",
"Vale.Interop.Base.td",
"Prims.list"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm128_lemma' has the required type *)
noextract
let gcm128_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm128_pre s iv) (gcm128_post s iv)) (gcm128_lemma' s iv)
noextract
let code_gcm128 = GC.va_code_Gcm_blocks_stdcall IA.win AES_128
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract
let lowstar_gcm128_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm128
dom
[]
_
_
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* And here's the gcm wrapper itself *)
noextract
let lowstar_gcm128 (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) : lowstar_gcm128_t s iv =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.wrap_weak_stdcall
code_gcm128
dom
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm256_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm256_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm256_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm256_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm256_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm256_lemma' has the required type *)
noextract
let gcm256_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm256_pre s iv) (gcm256_post s iv)) (gcm256_lemma' s iv)
noextract
let code_gcm256 = GC.va_code_Gcm_blocks_stdcall IA.win AES_256
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract | false | true | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lowstar_gcm256_t : s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Type0 | [] | Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm256_t | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Type0 | {
"end_col": 81,
"end_line": 387,
"start_col": 2,
"start_line": 380
} |
|
Prims.Tot | val gcm256_post: Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_post dom | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm256_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f | val gcm256_post: Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_post dom
let gcm256_post: Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_post dom = | false | null | false | fun
(s: Ghost.erased (Seq.seq nat32))
(iv: Ghost.erased supported_iv_LE)
(c: V.va_code)
(auth_b: b128)
(auth_bytes: uint64)
(auth_num: uint64)
(keys_b: b128)
(iv_b: b128)
(hkeys_b: b128)
(abytes_b: b128)
(in128x6_b: b128)
(out128x6_b: b128)
(len128x6_num: uint64)
(in128_b: b128)
(out128_b: b128)
(len128_num: uint64)
(inout_b: b128)
(plain_num: uint64)
(scratch_b: b128)
(tag_b: b128)
(va_s0: V.va_state)
(va_s1: V.va_state)
(f: V.va_fuel)
->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_256 (as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b) (as_vale_buffer iv_b) (Ghost.reveal iv)
(as_vale_buffer hkeys_b) (as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num) (as_vale_buffer in128_b)
(as_vale_buffer out128_b) (UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s) va_s1 f | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.X64.Decls.va_code",
"Vale.Stdcalls.X64.GCMencryptOpt.b128",
"Vale.Stdcalls.X64.GCMencryptOpt.uint64",
"Vale.X64.Decls.va_state",
"Vale.X64.Decls.va_fuel",
"Vale.AES.X64.GCMencryptOpt.va_ens_Gcm_blocks_stdcall",
"Vale.Interop.Assumptions.win",
"Vale.AES.AES_common_s.AES_256",
"Vale.X64.MemoryAdapters.as_vale_buffer",
"Vale.Arch.HeapTypes_s.TUInt8",
"Vale.Arch.HeapTypes_s.TUInt128",
"FStar.UInt64.v",
"FStar.Ghost.reveal",
"Prims.prop"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm128_lemma' has the required type *)
noextract
let gcm128_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm128_pre s iv) (gcm128_post s iv)) (gcm128_lemma' s iv)
noextract
let code_gcm128 = GC.va_code_Gcm_blocks_stdcall IA.win AES_128
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract
let lowstar_gcm128_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm128
dom
[]
_
_
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* And here's the gcm wrapper itself *)
noextract
let lowstar_gcm128 (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) : lowstar_gcm128_t s iv =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.wrap_weak_stdcall
code_gcm128
dom
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm256_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract | false | true | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm256_post: Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_post dom | [] | Vale.Stdcalls.X64.GCMencryptOpt.gcm256_post | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Words_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Vale.AsLowStar.ValeSig.vale_post Vale.Stdcalls.X64.GCMencryptOpt.dom | {
"end_col": 30,
"end_line": 295,
"start_col": 2,
"start_line": 263
} |
Prims.Tot | val gcm256_pre: (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_pre dom | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm256_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s) | val gcm256_pre: (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_pre dom
let gcm256_pre: (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_pre dom = | false | null | false | fun
(s: Ghost.erased (Seq.seq nat32))
(iv: Ghost.erased supported_iv_LE)
(c: V.va_code)
(auth_b: b128)
(auth_bytes: uint64)
(auth_num: uint64)
(keys_b: b128)
(iv_b: b128)
(hkeys_b: b128)
(abytes_b: b128)
(in128x6_b: b128)
(out128x6_b: b128)
(len128x6_num: uint64)
(in128_b: b128)
(out128_b: b128)
(len128_num: uint64)
(inout_b: b128)
(plain_num: uint64)
(scratch_b: b128)
(tag_b: b128)
(va_s0: V.va_state)
->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_256 (as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b) (as_vale_buffer iv_b) (Ghost.reveal iv)
(as_vale_buffer hkeys_b) (as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num) (as_vale_buffer in128_b)
(as_vale_buffer out128_b) (UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s) | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.X64.Decls.va_code",
"Vale.Stdcalls.X64.GCMencryptOpt.b128",
"Vale.Stdcalls.X64.GCMencryptOpt.uint64",
"Vale.X64.Decls.va_state",
"Vale.AES.X64.GCMencryptOpt.va_req_Gcm_blocks_stdcall",
"Vale.Interop.Assumptions.win",
"Vale.AES.AES_common_s.AES_256",
"Vale.X64.MemoryAdapters.as_vale_buffer",
"Vale.Arch.HeapTypes_s.TUInt8",
"Vale.Arch.HeapTypes_s.TUInt128",
"FStar.UInt64.v",
"FStar.Ghost.reveal",
"Prims.prop"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm128_lemma' has the required type *)
noextract
let gcm128_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm128_pre s iv) (gcm128_post s iv)) (gcm128_lemma' s iv)
noextract
let code_gcm128 = GC.va_code_Gcm_blocks_stdcall IA.win AES_128
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract
let lowstar_gcm128_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm128
dom
[]
_
_
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* And here's the gcm wrapper itself *)
noextract
let lowstar_gcm128 (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) : lowstar_gcm128_t s iv =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.wrap_weak_stdcall
code_gcm128
dom
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract | false | true | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm256_pre: (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_pre dom | [] | Vale.Stdcalls.X64.GCMencryptOpt.gcm256_pre | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Words_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Vale.AsLowStar.ValeSig.vale_pre Vale.Stdcalls.X64.GCMencryptOpt.dom | {
"end_col": 74,
"end_line": 259,
"start_col": 2,
"start_line": 229
} |
Prims.Tot | val gcm128_post: Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_post dom | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f | val gcm128_post: Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_post dom
let gcm128_post: Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_post dom = | false | null | false | fun
(s: Ghost.erased (Seq.seq nat32))
(iv: Ghost.erased supported_iv_LE)
(c: V.va_code)
(auth_b: b128)
(auth_bytes: uint64)
(auth_num: uint64)
(keys_b: b128)
(iv_b: b128)
(hkeys_b: b128)
(abytes_b: b128)
(in128x6_b: b128)
(out128x6_b: b128)
(len128x6_num: uint64)
(in128_b: b128)
(out128_b: b128)
(len128_num: uint64)
(inout_b: b128)
(plain_num: uint64)
(scratch_b: b128)
(tag_b: b128)
(va_s0: V.va_state)
(va_s1: V.va_state)
(f: V.va_fuel)
->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128 (as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b) (as_vale_buffer iv_b) (Ghost.reveal iv)
(as_vale_buffer hkeys_b) (as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num) (as_vale_buffer in128_b)
(as_vale_buffer out128_b) (UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s) va_s1 f | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.X64.Decls.va_code",
"Vale.Stdcalls.X64.GCMencryptOpt.b128",
"Vale.Stdcalls.X64.GCMencryptOpt.uint64",
"Vale.X64.Decls.va_state",
"Vale.X64.Decls.va_fuel",
"Vale.AES.X64.GCMencryptOpt.va_ens_Gcm_blocks_stdcall",
"Vale.Interop.Assumptions.win",
"Vale.AES.AES_common_s.AES_128",
"Vale.X64.MemoryAdapters.as_vale_buffer",
"Vale.Arch.HeapTypes_s.TUInt8",
"Vale.Arch.HeapTypes_s.TUInt128",
"FStar.UInt64.v",
"FStar.Ghost.reveal",
"Prims.prop"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract | false | true | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm128_post: Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE)
-> VSig.vale_post dom | [] | Vale.Stdcalls.X64.GCMencryptOpt.gcm128_post | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Words_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Vale.AsLowStar.ValeSig.vale_post Vale.Stdcalls.X64.GCMencryptOpt.dom | {
"end_col": 30,
"end_line": 123,
"start_col": 2,
"start_line": 91
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm128_encrypt_opt //: normal ((s:Ghost.erased (Seq.seq nat32)) -> lowstar_gcm128_t s)
= as_normal_t #((s:Ghost.erased (Seq.seq nat32)) -> (iv:Ghost.erased supported_iv_LE) -> lowstar_gcm128_t s iv) (fun (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) -> lowstar_gcm128 s iv) | let gcm128_encrypt_opt = | false | null | false | as_normal_t #(s: Ghost.erased (Seq.seq nat32) -> iv: Ghost.erased supported_iv_LE
-> lowstar_gcm128_t s iv)
(fun (s: Ghost.erased (Seq.seq nat32)) (iv: Ghost.erased supported_iv_LE) -> lowstar_gcm128 s iv) | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"Vale.Stdcalls.X64.GCMencryptOpt.as_normal_t",
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm128_t",
"Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm128"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm128_lemma' has the required type *)
noextract
let gcm128_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm128_pre s iv) (gcm128_post s iv)) (gcm128_lemma' s iv)
noextract
let code_gcm128 = GC.va_code_Gcm_blocks_stdcall IA.win AES_128
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract
let lowstar_gcm128_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm128
dom
[]
_
_
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* And here's the gcm wrapper itself *)
noextract
let lowstar_gcm128 (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) : lowstar_gcm128_t s iv =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.wrap_weak_stdcall
code_gcm128
dom
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm256_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm256_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm256_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm256_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm256_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm256_lemma' has the required type *)
noextract
let gcm256_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm256_pre s iv) (gcm256_post s iv)) (gcm256_lemma' s iv)
noextract
let code_gcm256 = GC.va_code_Gcm_blocks_stdcall IA.win AES_256
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract
let lowstar_gcm256_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm256
dom
[]
_
_
(W.mk_prediction code_gcm256 dom [] ((gcm256_lemma s iv) code_gcm256 IA.win))
(* And here's the gcm wrapper itself *)
noextract
let lowstar_gcm256 (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) : lowstar_gcm256_t s iv =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.wrap_weak_stdcall
code_gcm256
dom
(W.mk_prediction code_gcm256 dom [] ((gcm256_lemma s iv) code_gcm256 IA.win))
[@ (CCConv "stdcall") ] | false | false | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm128_encrypt_opt : Vale.Interop.Base.normal (
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm128_t s iv) | [] | Vale.Stdcalls.X64.GCMencryptOpt.gcm128_encrypt_opt | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Vale.Interop.Base.normal (
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm128_t s iv) | {
"end_col": 209,
"end_line": 400,
"start_col": 4,
"start_line": 400
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm128_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm128_pre s iv) (gcm128_post s iv)) (gcm128_lemma' s iv) | let gcm128_lemma (s: Ghost.erased (Seq.seq nat32)) (iv: Ghost.erased supported_iv_LE) = | false | null | false | as_t #(VSig.vale_sig_stdcall (gcm128_pre s iv) (gcm128_post s iv)) (gcm128_lemma' s iv) | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.Stdcalls.X64.GCMencryptOpt.as_t",
"Vale.AsLowStar.ValeSig.vale_sig_stdcall",
"Vale.Stdcalls.X64.GCMencryptOpt.dom",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm128_pre",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm128_post",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm128_lemma'"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm128_lemma' has the required type *) | false | false | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm128_lemma : s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Vale.AsLowStar.ValeSig.vale_sig_stdcall (Vale.Stdcalls.X64.GCMencryptOpt.gcm128_pre s iv)
(Vale.Stdcalls.X64.GCMencryptOpt.gcm128_post s iv) | [] | Vale.Stdcalls.X64.GCMencryptOpt.gcm128_lemma | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Vale.AsLowStar.ValeSig.vale_sig_stdcall (Vale.Stdcalls.X64.GCMencryptOpt.gcm128_pre s iv)
(Vale.Stdcalls.X64.GCMencryptOpt.gcm128_post s iv) | {
"end_col": 173,
"end_line": 200,
"start_col": 86,
"start_line": 200
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm256_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm256_pre s iv) (gcm256_post s iv)) (gcm256_lemma' s iv) | let gcm256_lemma (s: Ghost.erased (Seq.seq nat32)) (iv: Ghost.erased supported_iv_LE) = | false | null | false | as_t #(VSig.vale_sig_stdcall (gcm256_pre s iv) (gcm256_post s iv)) (gcm256_lemma' s iv) | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.Stdcalls.X64.GCMencryptOpt.as_t",
"Vale.AsLowStar.ValeSig.vale_sig_stdcall",
"Vale.Stdcalls.X64.GCMencryptOpt.dom",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm256_pre",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm256_post",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm256_lemma'"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm128_lemma' has the required type *)
noextract
let gcm128_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm128_pre s iv) (gcm128_post s iv)) (gcm128_lemma' s iv)
noextract
let code_gcm128 = GC.va_code_Gcm_blocks_stdcall IA.win AES_128
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract
let lowstar_gcm128_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm128
dom
[]
_
_
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* And here's the gcm wrapper itself *)
noextract
let lowstar_gcm128 (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) : lowstar_gcm128_t s iv =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.wrap_weak_stdcall
code_gcm128
dom
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm256_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm256_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm256_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm256_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm256_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm256_lemma' has the required type *) | false | false | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm256_lemma : s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Vale.AsLowStar.ValeSig.vale_sig_stdcall (Vale.Stdcalls.X64.GCMencryptOpt.gcm256_pre s iv)
(Vale.Stdcalls.X64.GCMencryptOpt.gcm256_post s iv) | [] | Vale.Stdcalls.X64.GCMencryptOpt.gcm256_lemma | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Vale.AsLowStar.ValeSig.vale_sig_stdcall (Vale.Stdcalls.X64.GCMencryptOpt.gcm256_pre s iv)
(Vale.Stdcalls.X64.GCMencryptOpt.gcm256_post s iv) | {
"end_col": 173,
"end_line": 372,
"start_col": 86,
"start_line": 372
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm256_encrypt_opt //: normal ((s:Ghost.erased (Seq.seq nat32)) -> lowstar_gcm256_t s)
= as_normal_t #((s:Ghost.erased (Seq.seq nat32)) -> (iv:Ghost.erased supported_iv_LE) -> lowstar_gcm256_t s iv) (fun (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) -> lowstar_gcm256 s iv) | let gcm256_encrypt_opt = | false | null | false | as_normal_t #(s: Ghost.erased (Seq.seq nat32) -> iv: Ghost.erased supported_iv_LE
-> lowstar_gcm256_t s iv)
(fun (s: Ghost.erased (Seq.seq nat32)) (iv: Ghost.erased supported_iv_LE) -> lowstar_gcm256 s iv) | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [
"total"
] | [
"Vale.Stdcalls.X64.GCMencryptOpt.as_normal_t",
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm256_t",
"Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm256"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm128_lemma' has the required type *)
noextract
let gcm128_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm128_pre s iv) (gcm128_post s iv)) (gcm128_lemma' s iv)
noextract
let code_gcm128 = GC.va_code_Gcm_blocks_stdcall IA.win AES_128
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract
let lowstar_gcm128_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm128
dom
[]
_
_
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* And here's the gcm wrapper itself *)
noextract
let lowstar_gcm128 (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) : lowstar_gcm128_t s iv =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.wrap_weak_stdcall
code_gcm128
dom
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm256_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm256_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm256_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm256_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm256_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm256_lemma' has the required type *)
noextract
let gcm256_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm256_pre s iv) (gcm256_post s iv)) (gcm256_lemma' s iv)
noextract
let code_gcm256 = GC.va_code_Gcm_blocks_stdcall IA.win AES_256
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract
let lowstar_gcm256_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm256
dom
[]
_
_
(W.mk_prediction code_gcm256 dom [] ((gcm256_lemma s iv) code_gcm256 IA.win))
(* And here's the gcm wrapper itself *)
noextract
let lowstar_gcm256 (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) : lowstar_gcm256_t s iv =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.wrap_weak_stdcall
code_gcm256
dom
(W.mk_prediction code_gcm256 dom [] ((gcm256_lemma s iv) code_gcm256 IA.win))
[@ (CCConv "stdcall") ]
let gcm128_encrypt_opt //: normal ((s:Ghost.erased (Seq.seq nat32)) -> lowstar_gcm128_t s)
= as_normal_t #((s:Ghost.erased (Seq.seq nat32)) -> (iv:Ghost.erased supported_iv_LE) -> lowstar_gcm128_t s iv) (fun (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) -> lowstar_gcm128 s iv)
[@ (CCConv "stdcall") ] | false | false | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm256_encrypt_opt : Vale.Interop.Base.normal (
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm256_t s iv) | [] | Vale.Stdcalls.X64.GCMencryptOpt.gcm256_encrypt_opt | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Vale.Interop.Base.normal (
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE
-> Vale.Stdcalls.X64.GCMencryptOpt.lowstar_gcm256_t s iv) | {
"end_col": 209,
"end_line": 404,
"start_col": 4,
"start_line": 404
} |
|
Prims.Ghost | val gcm128_lemma'
(s: Ghost.erased (Seq.seq nat32))
(iv: Ghost.erased supported_iv_LE)
(code: V.va_code)
(_win: bool)
(auth_b: b128)
(auth_bytes auth_num: uint64)
(keys_b iv_b hkeys_b abytes_b in128x6_b out128x6_b: b128)
(len128x6_num: uint64)
(in128_b out128_b: b128)
(len128_num: uint64)
(inout_b: b128)
(plain_num: uint64)
(scratch_b tag_b: b128)
(va_s0: V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0)
(ensures
(fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\ VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0 va_s1 f /\ ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\ ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b))) | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f | val gcm128_lemma'
(s: Ghost.erased (Seq.seq nat32))
(iv: Ghost.erased supported_iv_LE)
(code: V.va_code)
(_win: bool)
(auth_b: b128)
(auth_bytes auth_num: uint64)
(keys_b iv_b hkeys_b abytes_b in128x6_b out128x6_b: b128)
(len128x6_num: uint64)
(in128_b out128_b: b128)
(len128_num: uint64)
(inout_b: b128)
(plain_num: uint64)
(scratch_b tag_b: b128)
(va_s0: V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0)
(ensures
(fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\ VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0 va_s1 f /\ ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\ ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)))
let gcm128_lemma'
(s: Ghost.erased (Seq.seq nat32))
(iv: Ghost.erased supported_iv_LE)
(code: V.va_code)
(_win: bool)
(auth_b: b128)
(auth_bytes auth_num: uint64)
(keys_b iv_b hkeys_b abytes_b in128x6_b out128x6_b: b128)
(len128x6_num: uint64)
(in128_b out128_b: b128)
(len128_num: uint64)
(inout_b: b128)
(plain_num: uint64)
(scratch_b tag_b: b128)
(va_s0: V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0)
(ensures
(fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\ VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0 va_s1 f /\ ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\ ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b))) = | false | null | false | let va_s1, f =
GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128 (as_vale_buffer auth_b)
(UInt64.v auth_bytes) (UInt64.v auth_num) (as_vale_buffer keys_b) (as_vale_buffer iv_b)
(Ghost.reveal iv) (as_vale_buffer hkeys_b) (as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num) (as_vale_buffer in128_b)
(as_vale_buffer out128_b) (UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [] | [
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.X64.Decls.va_code",
"Prims.bool",
"Vale.Stdcalls.X64.GCMencryptOpt.b128",
"Vale.Stdcalls.X64.GCMencryptOpt.uint64",
"Vale.X64.Decls.va_state",
"Vale.X64.Decls.va_fuel",
"FStar.Pervasives.Native.Mktuple2",
"Prims.unit",
"Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal",
"Vale.Arch.HeapTypes_s.TUInt8",
"Vale.Arch.HeapTypes_s.TUInt128",
"FStar.Pervasives.Native.tuple2",
"Vale.X64.State.vale_state",
"Vale.AES.X64.GCMencryptOpt.va_lemma_Gcm_blocks_stdcall",
"Vale.Interop.Assumptions.win",
"Vale.AES.AES_common_s.AES_128",
"Vale.X64.MemoryAdapters.as_vale_buffer",
"FStar.UInt64.v",
"FStar.Ghost.reveal",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm128_pre",
"Prims.l_and",
"Vale.X64.Decls.eval_code",
"Vale.AsLowStar.ValeSig.vale_calling_conventions_stdcall",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm128_post",
"Vale.X64.Memory.buffer_writeable"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\ | false | false | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm128_lemma'
(s: Ghost.erased (Seq.seq nat32))
(iv: Ghost.erased supported_iv_LE)
(code: V.va_code)
(_win: bool)
(auth_b: b128)
(auth_bytes auth_num: uint64)
(keys_b iv_b hkeys_b abytes_b in128x6_b out128x6_b: b128)
(len128x6_num: uint64)
(in128_b out128_b: b128)
(len128_num: uint64)
(inout_b: b128)
(plain_num: uint64)
(scratch_b tag_b: b128)
(va_s0: V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0)
(ensures
(fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\ VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0 va_s1 f /\ ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\ ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b))) | [] | Vale.Stdcalls.X64.GCMencryptOpt.gcm128_lemma' | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE ->
code: Vale.X64.Decls.va_code ->
_win: Prims.bool ->
auth_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
auth_bytes: Vale.Stdcalls.X64.GCMencryptOpt.uint64 ->
auth_num: Vale.Stdcalls.X64.GCMencryptOpt.uint64 ->
keys_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
iv_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
hkeys_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
abytes_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
in128x6_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
out128x6_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
len128x6_num: Vale.Stdcalls.X64.GCMencryptOpt.uint64 ->
in128_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
out128_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
len128_num: Vale.Stdcalls.X64.GCMencryptOpt.uint64 ->
inout_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
plain_num: Vale.Stdcalls.X64.GCMencryptOpt.uint64 ->
scratch_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
tag_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
va_s0: Vale.X64.Decls.va_state
-> Prims.Ghost (Vale.X64.Decls.va_state * Vale.X64.Decls.va_fuel) | {
"end_col": 11,
"end_line": 196,
"start_col": 5,
"start_line": 172
} |
Prims.Ghost | val gcm256_lemma'
(s: Ghost.erased (Seq.seq nat32))
(iv: Ghost.erased supported_iv_LE)
(code: V.va_code)
(_win: bool)
(auth_b: b128)
(auth_bytes auth_num: uint64)
(keys_b iv_b hkeys_b abytes_b in128x6_b out128x6_b: b128)
(len128x6_num: uint64)
(in128_b out128_b: b128)
(len128_num: uint64)
(inout_b: b128)
(plain_num: uint64)
(scratch_b tag_b: b128)
(va_s0: V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm256_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0)
(ensures
(fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\ VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm256_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0 va_s1 f /\ ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\ ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b))) | [
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AES.X64.GCMencryptOpt",
"short_module": "GC"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": false,
"full_module": "Vale.X64.MemoryAdapters",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.Wrapper",
"short_module": "W"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.LowStarSig",
"short_module": "LSig"
},
{
"abbrev": true,
"full_module": "Vale.AsLowStar.ValeSig",
"short_module": "VSig"
},
{
"abbrev": true,
"full_module": "Vale.Interop.X64",
"short_module": "IX64"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Stdcalls.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm256_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm256_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm256_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f | val gcm256_lemma'
(s: Ghost.erased (Seq.seq nat32))
(iv: Ghost.erased supported_iv_LE)
(code: V.va_code)
(_win: bool)
(auth_b: b128)
(auth_bytes auth_num: uint64)
(keys_b iv_b hkeys_b abytes_b in128x6_b out128x6_b: b128)
(len128x6_num: uint64)
(in128_b out128_b: b128)
(len128_num: uint64)
(inout_b: b128)
(plain_num: uint64)
(scratch_b tag_b: b128)
(va_s0: V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm256_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0)
(ensures
(fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\ VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm256_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0 va_s1 f /\ ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\ ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)))
let gcm256_lemma'
(s: Ghost.erased (Seq.seq nat32))
(iv: Ghost.erased supported_iv_LE)
(code: V.va_code)
(_win: bool)
(auth_b: b128)
(auth_bytes auth_num: uint64)
(keys_b iv_b hkeys_b abytes_b in128x6_b out128x6_b: b128)
(len128x6_num: uint64)
(in128_b out128_b: b128)
(len128_num: uint64)
(inout_b: b128)
(plain_num: uint64)
(scratch_b tag_b: b128)
(va_s0: V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm256_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0)
(ensures
(fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\ VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm256_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0 va_s1 f /\ ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\ ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b))) = | false | null | false | let va_s1, f =
GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_256 (as_vale_buffer auth_b)
(UInt64.v auth_bytes) (UInt64.v auth_num) (as_vale_buffer keys_b) (as_vale_buffer iv_b)
(Ghost.reveal iv) (as_vale_buffer hkeys_b) (as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num) (as_vale_buffer in128_b)
(as_vale_buffer out128_b) (UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f | {
"checked_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.MemoryAdapters.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Interop.X64.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.AsLowStar.Wrapper.fsti.checked",
"Vale.AsLowStar.ValeSig.fst.checked",
"Vale.AsLowStar.MemoryHelpers.fsti.checked",
"Vale.AsLowStar.LowStarSig.fst.checked",
"Vale.AES.X64.GCMencryptOpt.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Stdcalls.X64.GCMencryptOpt.fst"
} | [] | [
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.X64.Decls.va_code",
"Prims.bool",
"Vale.Stdcalls.X64.GCMencryptOpt.b128",
"Vale.Stdcalls.X64.GCMencryptOpt.uint64",
"Vale.X64.Decls.va_state",
"Vale.X64.Decls.va_fuel",
"FStar.Pervasives.Native.Mktuple2",
"Prims.unit",
"Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal",
"Vale.Arch.HeapTypes_s.TUInt8",
"Vale.Arch.HeapTypes_s.TUInt128",
"FStar.Pervasives.Native.tuple2",
"Vale.X64.State.vale_state",
"Vale.AES.X64.GCMencryptOpt.va_lemma_Gcm_blocks_stdcall",
"Vale.Interop.Assumptions.win",
"Vale.AES.AES_common_s.AES_256",
"Vale.X64.MemoryAdapters.as_vale_buffer",
"FStar.UInt64.v",
"FStar.Ghost.reveal",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm256_pre",
"Prims.l_and",
"Vale.X64.Decls.eval_code",
"Vale.AsLowStar.ValeSig.vale_calling_conventions_stdcall",
"Vale.Stdcalls.X64.GCMencryptOpt.gcm256_post",
"Vale.X64.Memory.buffer_writeable"
] | [] | module Vale.Stdcalls.X64.GCMencryptOpt
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open FStar.Mul
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
open Vale.Def.Types_s
open Vale.Interop.Base
module IX64 = Vale.Interop.X64
module VSig = Vale.AsLowStar.ValeSig
module LSig = Vale.AsLowStar.LowStarSig
module ME = Vale.X64.Memory
module V = Vale.X64.Decls
module IA = Vale.Interop.Assumptions
module W = Vale.AsLowStar.Wrapper
open Vale.X64.MemoryAdapters
module VS = Vale.X64.State
module MS = Vale.X64.Machine_s
module GC = Vale.AES.X64.GCMencryptOpt
open Vale.AES.AES_s
open Vale.AES.GCM_s
let uint64 = UInt64.t
(* A little utility to trigger normalization in types *)
noextract
let as_t (#a:Type) (x:normal a) : a = x
noextract
let as_normal_t (#a:Type) (x:a) : normal a = x
[@__reduce__] noextract
let b128 = buf_t TUInt8 TUInt128
[@__reduce__] noextract
let t128_mod = TD_Buffer TUInt8 TUInt128 default_bq
[@__reduce__] noextract
let t128_mod_pub = TD_Buffer TUInt8 TUInt128 ({modified=true; strict_disjointness=false; taint=MS.Public})
[@__reduce__] noextract
let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret})
[@__reduce__] noextract
let tuint64 = TD_Base TUInt64
[@__reduce__] noextract
let (dom: list td{List.length dom <= 20}) =
let y = [t128_no_mod; tuint64; tuint64; t128_no_mod; t128_mod_pub; t128_no_mod;
t128_no_mod; t128_no_mod; t128_mod; tuint64; t128_no_mod; t128_mod; tuint64; t128_mod; tuint64; t128_mod; t128_mod] in
assert_norm (List.length y = 17);
y
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm128_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm128_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm128_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm128_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm128_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b)
)) =
let va_s1, f = GC.va_lemma_Gcm_blocks_stdcall code va_s0 IA.win AES_128
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) in
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 auth_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 keys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 iv_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 hkeys_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 abytes_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128x6_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 out128_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 inout_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 scratch_b;
Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 tag_b;
va_s1, f
(* Prove that gcm128_lemma' has the required type *)
noextract
let gcm128_lemma (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) = as_t #(VSig.vale_sig_stdcall (gcm128_pre s iv) (gcm128_post s iv)) (gcm128_lemma' s iv)
noextract
let code_gcm128 = GC.va_code_Gcm_blocks_stdcall IA.win AES_128
(* Here's the type expected for the gcm wrapper *)
[@__reduce__] noextract
let lowstar_gcm128_t (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.as_lowstar_sig_t_weak_stdcall
code_gcm128
dom
[]
_
_
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
(* And here's the gcm wrapper itself *)
noextract
let lowstar_gcm128 (s:Ghost.erased (Seq.seq nat32)) (iv:Ghost.erased supported_iv_LE) : lowstar_gcm128_t s iv =
assert_norm (List.length dom + List.length ([]<:list arg) <= 20);
IX64.wrap_weak_stdcall
code_gcm128
dom
(W.mk_prediction code_gcm128 dom [] ((gcm128_lemma s iv) code_gcm128 IA.win))
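(* Illustrative recap, not part of the original source: each wrapper is assembled
   in three steps -- take the Vale code (code_gcm128), package its correctness
   lemma with W.mk_prediction, and lower the result to a Low*-callable signature
   with IX64.wrap_weak_stdcall.  The gcm256 definitions that follow set up the
   same pattern for AES_256.  The argument domain fits the interop bound checked
   above; the following (redundant) normalization check restates it. *)
let _ = assert_norm (List.length dom = 17)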
(* Need to rearrange the order of arguments *)
[@__reduce__] noextract
let gcm256_pre : (Ghost.erased (Seq.seq nat32)) -> (Ghost.erased supported_iv_LE) -> VSig.vale_pre dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state) ->
GC.va_req_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b)
(UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b) (Ghost.reveal s)
[@__reduce__] noextract
let gcm256_post : Ghost.erased (Seq.seq nat32) -> (Ghost.erased supported_iv_LE) -> VSig.vale_post dom =
fun (s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(c:V.va_code)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
(va_s1:V.va_state)
(f:V.va_fuel) ->
GC.va_ens_Gcm_blocks_stdcall c va_s0 IA.win AES_256
(as_vale_buffer auth_b) (UInt64.v auth_bytes)
(UInt64.v auth_num) (as_vale_buffer keys_b)
(as_vale_buffer iv_b) (Ghost.reveal iv) (as_vale_buffer hkeys_b)
(as_vale_buffer abytes_b) (as_vale_buffer in128x6_b)
(as_vale_buffer out128x6_b) (UInt64.v len128x6_num)
(as_vale_buffer in128_b) (as_vale_buffer out128_b)
(UInt64.v len128_num) (as_vale_buffer inout_b) (UInt64.v plain_num)
(as_vale_buffer scratch_b) (as_vale_buffer tag_b)
(Ghost.reveal s) va_s1 f
#set-options "--z3rlimit 50"
[@__reduce__] noextract
let gcm256_lemma'
(s:Ghost.erased (Seq.seq nat32))
(iv:Ghost.erased supported_iv_LE)
(code:V.va_code)
(_win:bool)
(auth_b:b128)
(auth_bytes:uint64)
(auth_num:uint64)
(keys_b:b128)
(iv_b:b128)
(hkeys_b:b128)
(abytes_b:b128)
(in128x6_b:b128)
(out128x6_b:b128)
(len128x6_num:uint64)
(in128_b:b128)
(out128_b:b128)
(len128_num:uint64)
(inout_b:b128)
(plain_num:uint64)
(scratch_b:b128)
(tag_b:b128)
(va_s0:V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm256_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0)
(ensures (fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\
VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm256_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b va_s0 va_s1 f /\
ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\
ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\ | false | false | Vale.Stdcalls.X64.GCMencryptOpt.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm256_lemma'
(s: Ghost.erased (Seq.seq nat32))
(iv: Ghost.erased supported_iv_LE)
(code: V.va_code)
(_win: bool)
(auth_b: b128)
(auth_bytes auth_num: uint64)
(keys_b iv_b hkeys_b abytes_b in128x6_b out128x6_b: b128)
(len128x6_num: uint64)
(in128_b out128_b: b128)
(len128_num: uint64)
(inout_b: b128)
(plain_num: uint64)
(scratch_b tag_b: b128)
(va_s0: V.va_state)
: Ghost (V.va_state & V.va_fuel)
(requires
gcm256_pre s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0)
(ensures
(fun (va_s1, f) ->
V.eval_code code va_s0 f va_s1 /\ VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\
gcm256_post s iv code auth_b auth_bytes auth_num keys_b iv_b hkeys_b abytes_b in128x6_b
out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b
va_s0 va_s1 f /\ ME.buffer_writeable (as_vale_buffer auth_b) /\
ME.buffer_writeable (as_vale_buffer keys_b) /\ ME.buffer_writeable (as_vale_buffer iv_b) /\
ME.buffer_writeable (as_vale_buffer hkeys_b) /\
ME.buffer_writeable (as_vale_buffer abytes_b) /\
ME.buffer_writeable (as_vale_buffer in128x6_b) /\
ME.buffer_writeable (as_vale_buffer out128x6_b) /\
ME.buffer_writeable (as_vale_buffer in128_b) /\
ME.buffer_writeable (as_vale_buffer out128_b) /\
ME.buffer_writeable (as_vale_buffer inout_b) /\
ME.buffer_writeable (as_vale_buffer scratch_b) /\
ME.buffer_writeable (as_vale_buffer tag_b))) | [] | Vale.Stdcalls.X64.GCMencryptOpt.gcm256_lemma' | {
"file_name": "vale/code/arch/x64/interop/Vale.Stdcalls.X64.GCMencryptOpt.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
s: FStar.Ghost.erased (FStar.Seq.Base.seq Vale.Def.Types_s.nat32) ->
iv: FStar.Ghost.erased Vale.AES.GCM_s.supported_iv_LE ->
code: Vale.X64.Decls.va_code ->
_win: Prims.bool ->
auth_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
auth_bytes: Vale.Stdcalls.X64.GCMencryptOpt.uint64 ->
auth_num: Vale.Stdcalls.X64.GCMencryptOpt.uint64 ->
keys_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
iv_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
hkeys_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
abytes_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
in128x6_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
out128x6_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
len128x6_num: Vale.Stdcalls.X64.GCMencryptOpt.uint64 ->
in128_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
out128_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
len128_num: Vale.Stdcalls.X64.GCMencryptOpt.uint64 ->
inout_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
plain_num: Vale.Stdcalls.X64.GCMencryptOpt.uint64 ->
scratch_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
tag_b: Vale.Stdcalls.X64.GCMencryptOpt.b128 ->
va_s0: Vale.X64.Decls.va_state
-> Prims.Ghost (Vale.X64.Decls.va_state * Vale.X64.Decls.va_fuel) | {
"end_col": 11,
"end_line": 368,
"start_col": 5,
"start_line": 344
} |
Prims.Tot | val sel (a b c: bool) : bool | [
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.BitVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.BV",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.UInt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let sel (a b c:bool) : bool = if c then a else b | val sel (a b c: bool) : bool
let sel (a b c: bool) : bool = | false | null | false | if c then a else b | {
"checked_file": "Vale.Def.Sel.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"prims.fst.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.BV.fsti.checked",
"FStar.BitVector.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Def.Sel.fst"
} | [
"total"
] | [
"Prims.bool"
] | [] | module Vale.Def.Sel
open FStar.UInt
open FStar.Seq
open FStar.BV
open FStar.BitVector
open Vale.Def.Words_s | false | true | Vale.Def.Sel.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val sel (a b c: bool) : bool | [] | Vale.Def.Sel.sel | {
"file_name": "vale/specs/defs/Vale.Def.Sel.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: Prims.bool -> b: Prims.bool -> c: Prims.bool -> Prims.bool | {
"end_col": 48,
"end_line": 9,
"start_col": 30,
"start_line": 9
} |
Prims.Tot | val isel32 (a b c: nat32) : nat32 | [
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.BitVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.BV",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.UInt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let isel32 (a:nat32) (b:nat32) (c:nat32) : nat32 = logsel #32 a b c | val isel32 (a b c: nat32) : nat32
let isel32 (a b c: nat32) : nat32 = | false | null | false | logsel #32 a b c | {
"checked_file": "Vale.Def.Sel.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"prims.fst.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.BV.fsti.checked",
"FStar.BitVector.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Def.Sel.fst"
} | [
"total"
] | [
"Vale.Def.Words_s.nat32",
"Vale.Def.Sel.logsel"
] | [] | module Vale.Def.Sel
open FStar.UInt
open FStar.Seq
open FStar.BV
open FStar.BitVector
open Vale.Def.Words_s
let sel (a b c:bool) : bool = if c then a else b
let rec logsel_vec (#n: pos) (a b c: bv_t n) : Tot (bv_t n) =
if n = 1
then create 1 (sel (index a 0) (index b 0) (index c 0))
else append (create 1 (sel (index a 0) (index b 0) (index c 0))) (logsel_vec #(n - 1) (slice a 1 n) (slice b 1 n) (slice c 1 n))
#push-options "--initial_fuel 1 --max_fuel 1"
let rec logsel_vec_definition (#n: pos) (a b c: bv_t n) (i: nat{i < n})
: Lemma (ensures index (logsel_vec #n a b c) i = sel (index a i) (index b i) (index c i))
[SMTPat (index (logsel_vec #n a b c) i)] =
if i = 0 then () else logsel_vec_definition #(n - 1) (slice a 1 n) (slice b 1 n) (slice c 1 n) (i - 1)
#pop-options
let logsel (#n:pos) (a:uint_t n) (b:uint_t n) (c:uint_t n) : Tot (uint_t n) =
from_vec #n (logsel_vec #n (to_vec #n a) (to_vec #n b) (to_vec #n c)) | false | true | Vale.Def.Sel.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val isel32 (a b c: nat32) : nat32 | [] | Vale.Def.Sel.isel32 | {
"file_name": "vale/specs/defs/Vale.Def.Sel.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: Vale.Def.Words_s.nat32 -> b: Vale.Def.Words_s.nat32 -> c: Vale.Def.Words_s.nat32
-> Vale.Def.Words_s.nat32 | {
"end_col": 74,
"end_line": 26,
"start_col": 58,
"start_line": 26
} |
Prims.Tot | val logsel (#n: pos) (a b c: uint_t n) : Tot (uint_t n) | [
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.BitVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.BV",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.UInt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let logsel (#n:pos) (a:uint_t n) (b:uint_t n) (c:uint_t n) : Tot (uint_t n) =
from_vec #n (logsel_vec #n (to_vec #n a) (to_vec #n b) (to_vec #n c)) | val logsel (#n: pos) (a b c: uint_t n) : Tot (uint_t n)
let logsel (#n: pos) (a b c: uint_t n) : Tot (uint_t n) = | false | null | false | from_vec #n (logsel_vec #n (to_vec #n a) (to_vec #n b) (to_vec #n c)) | {
"checked_file": "Vale.Def.Sel.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"prims.fst.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.BV.fsti.checked",
"FStar.BitVector.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Def.Sel.fst"
} | [
"total"
] | [
"Prims.pos",
"FStar.UInt.uint_t",
"FStar.UInt.from_vec",
"Vale.Def.Sel.logsel_vec",
"FStar.UInt.to_vec"
] | [] | module Vale.Def.Sel
open FStar.UInt
open FStar.Seq
open FStar.BV
open FStar.BitVector
open Vale.Def.Words_s
let sel (a b c:bool) : bool = if c then a else b
let rec logsel_vec (#n: pos) (a b c: bv_t n) : Tot (bv_t n) =
if n = 1
then create 1 (sel (index a 0) (index b 0) (index c 0))
else append (create 1 (sel (index a 0) (index b 0) (index c 0))) (logsel_vec #(n - 1) (slice a 1 n) (slice b 1 n) (slice c 1 n))
#push-options "--initial_fuel 1 --max_fuel 1"
let rec logsel_vec_definition (#n: pos) (a b c: bv_t n) (i: nat{i < n})
: Lemma (ensures index (logsel_vec #n a b c) i = sel (index a i) (index b i) (index c i))
[SMTPat (index (logsel_vec #n a b c) i)] =
if i = 0 then () else logsel_vec_definition #(n - 1) (slice a 1 n) (slice b 1 n) (slice c 1 n) (i - 1)
#pop-options | false | false | Vale.Def.Sel.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val logsel (#n: pos) (a b c: uint_t n) : Tot (uint_t n) | [] | Vale.Def.Sel.logsel | {
"file_name": "vale/specs/defs/Vale.Def.Sel.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: FStar.UInt.uint_t n -> b: FStar.UInt.uint_t n -> c: FStar.UInt.uint_t n -> FStar.UInt.uint_t n | {
"end_col": 71,
"end_line": 24,
"start_col": 2,
"start_line": 24
} |
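(* Informally: sel is a one-bit multiplexer, logsel_vec maps it across a bit
   vector, and logsel / isel32 transport it to machine words through to_vec and
   from_vec.  Bitwise this is (a AND c) OR (b AND (NOT c)): bit i of the result
   comes from a where c has a 1 and from b where c has a 0.  A hypothetical
   worked instance, illustrative only and not machine-checked here:
     isel32 0xFF00FF00 0x00FF00FF 0xFFFF0000 == 0xFF0000FF
   since c = 0xFFFF0000 selects the high 16 bits from a and the low 16 bits
   from b. *)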
Prims.Tot | val logsel_vec (#n: pos) (a b c: bv_t n) : Tot (bv_t n) | [
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.BitVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.BV",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.UInt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec logsel_vec (#n: pos) (a b c: bv_t n) : Tot (bv_t n) =
if n = 1
then create 1 (sel (index a 0) (index b 0) (index c 0))
else append (create 1 (sel (index a 0) (index b 0) (index c 0))) (logsel_vec #(n - 1) (slice a 1 n) (slice b 1 n) (slice c 1 n)) | val logsel_vec (#n: pos) (a b c: bv_t n) : Tot (bv_t n)
let rec logsel_vec (#n: pos) (a b c: bv_t n) : Tot (bv_t n) = | false | null | false | if n = 1
then create 1 (sel (index a 0) (index b 0) (index c 0))
else
append (create 1 (sel (index a 0) (index b 0) (index c 0)))
(logsel_vec #(n - 1) (slice a 1 n) (slice b 1 n) (slice c 1 n)) | {
"checked_file": "Vale.Def.Sel.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"prims.fst.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.BV.fsti.checked",
"FStar.BitVector.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Def.Sel.fst"
} | [
"total"
] | [
"Prims.pos",
"FStar.BitVector.bv_t",
"Prims.op_Equality",
"Prims.int",
"FStar.Seq.Base.create",
"Prims.bool",
"Vale.Def.Sel.sel",
"FStar.Seq.Base.index",
"FStar.Seq.Base.append",
"Vale.Def.Sel.logsel_vec",
"Prims.op_Subtraction",
"FStar.Seq.Base.slice"
] | [] | module Vale.Def.Sel
open FStar.UInt
open FStar.Seq
open FStar.BV
open FStar.BitVector
open Vale.Def.Words_s
let sel (a b c:bool) : bool = if c then a else b | false | false | Vale.Def.Sel.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val logsel_vec (#n: pos) (a b c: bv_t n) : Tot (bv_t n) | [
"recursion"
] | Vale.Def.Sel.logsel_vec | {
"file_name": "vale/specs/defs/Vale.Def.Sel.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: FStar.BitVector.bv_t n -> b: FStar.BitVector.bv_t n -> c: FStar.BitVector.bv_t n
-> FStar.BitVector.bv_t n | {
"end_col": 130,
"end_line": 14,
"start_col": 2,
"start_line": 12
} |
FStar.Pervasives.Lemma | val logsel_vec_definition (#n: pos) (a b c: bv_t n) (i: nat{i < n})
: Lemma (ensures index (logsel_vec #n a b c) i = sel (index a i) (index b i) (index c i))
[SMTPat (index (logsel_vec #n a b c) i)] | [
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.BitVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.BV",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.UInt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec logsel_vec_definition (#n: pos) (a b c: bv_t n) (i: nat{i < n})
: Lemma (ensures index (logsel_vec #n a b c) i = sel (index a i) (index b i) (index c i))
[SMTPat (index (logsel_vec #n a b c) i)] =
if i = 0 then () else logsel_vec_definition #(n - 1) (slice a 1 n) (slice b 1 n) (slice c 1 n) (i - 1) | val logsel_vec_definition (#n: pos) (a b c: bv_t n) (i: nat{i < n})
: Lemma (ensures index (logsel_vec #n a b c) i = sel (index a i) (index b i) (index c i))
[SMTPat (index (logsel_vec #n a b c) i)]
let rec logsel_vec_definition (#n: pos) (a b c: bv_t n) (i: nat{i < n})
: Lemma (ensures index (logsel_vec #n a b c) i = sel (index a i) (index b i) (index c i))
[SMTPat (index (logsel_vec #n a b c) i)] = | false | null | true | if i = 0
then ()
else logsel_vec_definition #(n - 1) (slice a 1 n) (slice b 1 n) (slice c 1 n) (i - 1) | {
"checked_file": "Vale.Def.Sel.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"prims.fst.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.BV.fsti.checked",
"FStar.BitVector.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Def.Sel.fst"
} | [
"lemma"
] | [
"Prims.pos",
"FStar.BitVector.bv_t",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"Prims.op_Equality",
"Prims.int",
"Prims.bool",
"Vale.Def.Sel.logsel_vec_definition",
"Prims.op_Subtraction",
"FStar.Seq.Base.slice",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"FStar.Seq.Base.index",
"Vale.Def.Sel.logsel_vec",
"Vale.Def.Sel.sel",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | module Vale.Def.Sel
open FStar.UInt
open FStar.Seq
open FStar.BV
open FStar.BitVector
open Vale.Def.Words_s
let sel (a b c:bool) : bool = if c then a else b
let rec logsel_vec (#n: pos) (a b c: bv_t n) : Tot (bv_t n) =
if n = 1
then create 1 (sel (index a 0) (index b 0) (index c 0))
else append (create 1 (sel (index a 0) (index b 0) (index c 0))) (logsel_vec #(n - 1) (slice a 1 n) (slice b 1 n) (slice c 1 n))
#push-options "--initial_fuel 1 --max_fuel 1"
let rec logsel_vec_definition (#n: pos) (a b c: bv_t n) (i: nat{i < n})
: Lemma (ensures index (logsel_vec #n a b c) i = sel (index a i) (index b i) (index c i)) | false | false | Vale.Def.Sel.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val logsel_vec_definition (#n: pos) (a b c: bv_t n) (i: nat{i < n})
: Lemma (ensures index (logsel_vec #n a b c) i = sel (index a i) (index b i) (index c i))
[SMTPat (index (logsel_vec #n a b c) i)] | [
"recursion"
] | Vale.Def.Sel.logsel_vec_definition | {
"file_name": "vale/specs/defs/Vale.Def.Sel.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
a: FStar.BitVector.bv_t n ->
b: FStar.BitVector.bv_t n ->
c: FStar.BitVector.bv_t n ->
i: Prims.nat{i < n}
-> FStar.Pervasives.Lemma
(ensures
FStar.Seq.Base.index (Vale.Def.Sel.logsel_vec a b c) i =
Vale.Def.Sel.sel (FStar.Seq.Base.index a i)
(FStar.Seq.Base.index b i)
(FStar.Seq.Base.index c i))
[SMTPat (FStar.Seq.Base.index (Vale.Def.Sel.logsel_vec a b c) i)] | {
"end_col": 104,
"end_line": 20,
"start_col": 2,
"start_line": 20
} |
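(* Informal consequence: because logsel_vec_definition is registered as an SMT
   pattern, bit-level reasoning about logsel-based operations is largely
   automatic.  Combined with the to_vec / from_vec inverse lemmas it yields,
   stated here as an informal remark rather than a checked lemma, for any i < 32:
     index (to_vec (isel32 a b c)) i =
       sel (index (to_vec a) i) (index (to_vec b) i) (index (to_vec c) i) *)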
Prims.GTot | val irepr_v
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(x: irepr p s compl)
: GTot t | [
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "BF"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let irepr_v
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : GTot t
= Ghost.reveal (IRepr?.gv x) | val irepr_v
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(x: irepr p s compl)
: GTot t
let irepr_v
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(x: irepr p s compl)
: GTot t = | false | null | false | Ghost.reveal (IRepr?.gv x) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"sometrivial"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"LowParse.Low.Base.compl_t",
"LowParse.Low.Base.irepr",
"FStar.Ghost.reveal",
"LowParse.Low.Base.__proj__IRepr__item__gv"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
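(* Usage sketch (names hypothetical): given some validator [v32] for a parser
   [p32], [validate v32 b len] returns [true] exactly when the first [len]
   bytes of [b] hold a valid [p32]-encoded value starting at position [0ul]
   (and [len] itself fits in the success range of the error encoding). *)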
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
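(* For instance, assuming a hypothetical parser [p4] whose kind is total and
   pins the size to exactly 4 bytes (parser_kind_low = parser_kind_high = 4),
   [validate_total_constant_size p4 4uL ()] is a validator that only performs
   a bounds check and never reads the input. *)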
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
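(* Similarly, for the same hypothetical fixed-size parser [p4],
   [jump_constant_size p4 4ul ()] is a jumper that simply advances the
   position by 4 bytes. *)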
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
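(* Intuitively, [writable b pos pos' h] states that the preorder [rel] puts no
   constraint on the contents of [b] between [pos] and [pos'] in state [h]:
   any two ways of overwriting that sub-range yield [rel]-related sequences,
   so the range can be freely updated. *)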
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h'
#push-options "--z3rlimit 16"
inline_for_extraction
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos
))
= let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h0 (Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@inline_let] let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos'
#pop-options
inline_for_extraction
let copy_strong'
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\ (
let clen = content_length p h src spos in
U32.v dpos + clen <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + clen) h /\
B.loc_disjoint (loc_slice_from src spos) (loc_slice_from_to dst dpos (dpos `U32.add` (U32.uint_to_t clen)))
)))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
))
= let spos' = j src spos in
copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= if (dst.len `U32.sub` dpos) `U32.lt` (spos' `U32.sub` spos)
then max_uint32
else copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(jmp: jumper p)
(src: slice rrel1 rel1)
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (content_length p h src spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos (get_valid_pos p h src spos)) (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= let spos' = jmp src spos in
copy_weak_with_length p src spos spos' dst dpos
(* fold_left on lists *)
module BF = LowStar.Buffer
#push-options "--z3rlimit 256 --fuel 1 --ifuel 1"
#restart-solver
inline_for_extraction
let list_fold_left_gen
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(post_interrupt: ((h: HS.mem) -> GTot Type0))
(post_interrupt_frame: (h: HS.mem) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
post_interrupt h
)) (ensures (post_interrupt h')))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
HST.Stack bool
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos1 /\
valid_pos p h0 sl pos1 pos2 /\
valid_list p h0 sl pos2 pos' /\
inv h (contents_list p h0 sl pos pos1) (contents p h0 sl pos1 :: contents_list p h0 sl pos2 pos') pos1
))
(ensures (fun h ctinue h' ->
B.modifies (Ghost.reveal l) h h' /\
(if ctinue then inv h' (contents_list p h0 sl pos pos1 `L.append` [contents p h0 sl pos1]) (contents_list p h0 sl pos2 pos') pos2 else post_interrupt h')
))
))
: HST.Stack bool
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h res h' ->
B.modifies (Ghost.reveal l) h h' /\
(if res then inv h' (contents_list p h sl pos pos') [] pos' else post_interrupt h')
))
= HST.push_frame ();
let h1 = HST.get () in
// B.fresh_frame_modifies h0 h1;
let bpos : BF.pointer U32.t = BF.alloca pos 1ul in
let bctinue : BF.pointer bool = BF.alloca true 1ul in
let btest: BF.pointer bool = BF.alloca (pos `U32.lt` pos') 1ul in
let h2 = HST.get () in
assert (B.modifies B.loc_none h0 h2);
let test_pre (h: HS.mem) : GTot Type0 =
B.live h bpos /\ B.live h bctinue /\ B.live h btest /\ (
let pos1 = Seq.index (B.as_seq h bpos) 0 in
let ctinue = Seq.index (B.as_seq h bctinue) 0 in
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
B.modifies (Ghost.reveal l `B.loc_union` B.loc_region_only true (HS.get_tip h1)) h2 h /\
Seq.index (B.as_seq h btest) 0 == ((U32.v (Seq.index (B.as_seq h bpos) 0) < U32.v pos') && Seq.index (B.as_seq h bctinue) 0) /\
(if ctinue then inv h (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 else post_interrupt h)
)
in
let test_post (cond: bool) (h: HS.mem) : GTot Type0 =
test_pre h /\
cond == Seq.index (B.as_seq h btest) 0
in
valid_list_nil p h0 sl pos;
inv_frame h0 [] (contents_list p h0 sl pos pos') pos h1;
inv_frame h1 [] (contents_list p h0 sl pos pos') pos h2;
[@inline_let]
let while_body () : HST.Stack unit
(requires (fun h -> test_post true h))
(ensures (fun _ _ h1 -> test_pre h1))
=
let h51 = HST.get () in
let pos1 = B.index bpos 0ul in
valid_list_cons_recip p h0 sl pos1 pos';
//assert (B.modifies (Ghost.reveal l `B.loc_union` B.loc_buffer bpos) h0 h51);
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos'));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos pos1));
valid_list_cons_recip p h51 sl pos1 pos';
let pos2 = j sl pos1 in
let h52 = HST.get () in
inv_frame h51 (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 h52;
B.modifies_only_not_unused_in (Ghost.reveal l) h0 h52;
let ctinue = body pos1 pos2 in
let h53 = HST.get () in
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos2 pos'));
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h1));
valid_pos_frame_strong p h0 sl pos1 pos2 (Ghost.reveal l) h53;
valid_list_snoc p h0 sl pos pos1;
B.upd bpos 0ul pos2;
B.upd bctinue 0ul ctinue;
B.upd btest 0ul ((pos2 `U32.lt` pos') && ctinue);
let h54 = HST.get () in
[@inline_let]
let _ =
if ctinue
then inv_frame h53 (contents_list p h0 sl pos pos2) (contents_list p h0 sl pos2 pos') pos2 h54
else post_interrupt_frame h53 h54
in
()
in
C.Loops.while
#test_pre
#test_post
(fun (_: unit) -> (
B.index btest 0ul) <: HST.Stack bool (requires (fun h -> test_pre h)) (ensures (fun h x h1 -> test_post x h1)))
while_body
;
valid_list_nil p h0 sl pos';
let res = B.index bctinue 0ul in
let h3 = HST.get () in
HST.pop_frame ();
let h4 = HST.get () in
//B.popped_modifies h3 h4;
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h3));
[@inline_let]
let _ =
if res
then inv_frame h3 (contents_list p h0 sl pos pos') [] pos' h4
else post_interrupt_frame h3 h4
in
res
#pop-options
//B.loc_includes_union_l (B.loc_all_regions_from false (HS.get_tip h1)) (Ghost.reveal l) (Ghost.reveal l)
//B.modifies_fresh_frame_popped h0 h1 (Ghost.reveal l) h3 h4
module G = FStar.Ghost
inline_for_extraction
let list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
(l1: Ghost.erased (list t)) ->
(x: Ghost.erased t) ->
(l2: Ghost.erased (list t)) ->
HST.Stack unit
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\
U32.v pos <= U32.v pos1 /\ U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' == Ghost.reveal l1 `L.append` (Ghost.reveal x :: Ghost.reveal l2)
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (Ghost.reveal l1 `L.append` [contents p h0 sl pos1]) (Ghost.reveal l2) pos2
))
))
: HST.Stack unit
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (contents_list p h sl pos pos') [] pos'
))
= let _ = list_fold_left_gen
p
j
sl
pos pos'
h0
l
inv
inv_frame
(fun _ -> False)
(fun _ _ -> ())
(fun pos1 pos2 ->
let h = HST.get () in
valid_list_cons p h sl pos1 pos';
valid_list_append p h sl pos pos1 pos';
body
pos1
pos2
(Ghost.hide (contents_list p h sl pos pos1))
(Ghost.hide (contents p h sl pos1))
(Ghost.hide (contents_list p h sl pos2 pos'))
;
true
)
in
()
inline_for_extraction
let list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v res == L.length (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
B.fresh_frame_modifies h0 h1;
let blen : BF.pointer U32.t = BF.alloca 0ul 1ul in
let h2 = HST.get () in
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer blen))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_buffer blen) h2 h /\
B.live h blen /\ (
let len = U32.v (Seq.index (B.as_seq h blen) 0) in
len <= U32.v pos1 /\ // necessary to prove that length computations do not overflow
len == L.length l1
))
(fun h l1 l2 pos1 h' ->
B.modifies_only_not_unused_in (B.loc_buffer blen) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
B.upd blen 0ul (B.index blen 0ul `U32.add` 1ul);
Classical.forall_intro_2 (list_length_append #t)
)
;
let len = B.index blen 0ul in
HST.pop_frame ();
len
#push-options "--z3rlimit 32 --fuel 2 --ifuel 1"
inline_for_extraction
let list_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(f: (t -> Tot bool))
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
(x: Ghost.erased t) ->
HST.Stack bool
(requires (fun h -> valid_content p h sl pos (G.reveal x)))
(ensures (fun h res h' -> B.modifies B.loc_none h h' /\ res == f (G.reveal x)))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
(#rrel_out #rel_out: _)
(sl_out : slice rrel_out rel_out)
(pos_out : U32.t)
: HST.Stack U32.t
(requires (fun h ->
U32.v pos_out + U32.v pos' - U32.v pos <= U32.v sl_out.len /\
valid_list p h sl pos pos' /\
B.loc_disjoint (loc_slice_from_to sl pos pos') (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
live_slice h sl_out
))
(ensures (fun h pos_out' h' ->
B.modifies (loc_slice_from_to sl_out pos_out pos_out') h h' /\
U32.v pos_out' - U32.v pos_out <= U32.v pos' - U32.v pos /\
valid_list p h' sl_out pos_out pos_out' /\
contents_list p h' sl_out pos_out pos_out' == L.filter f (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
//B.fresh_frame_modifies h0 h1;
let bpos_out' : BF.pointer U32.t = BF.alloca pos_out 1ul in
let h2 = HST.get () in
let inv (h: HS.mem) (l1 l2: list t) (pos1: U32.t) : GTot Type0 =
B.live h bpos_out' /\ (
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
valid_list p h sl_out pos_out pos_out' /\
contents_list p h sl_out pos_out pos_out' == L.filter f l1 /\
U32.v pos_out' - U32.v pos1 <= U32.v pos_out - U32.v pos // necessary to prove that length computations do not overflow
)
in
valid_list_nil p h2 sl_out pos_out;
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))))
inv
(fun h l1 l2 pos1 h' ->
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies_only_not_unused_in (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
let pos_out1 = B.index bpos_out' 0ul in
list_filter_append f (G.reveal l1) [G.reveal x];
if f' sl pos1 x
then begin
assert (B.loc_includes (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) (loc_slice_from_to sl_out pos_out1 (pos_out1 `U32.add` (pos2 `U32.sub` pos1))));
let h = HST.get () in
writable_weaken sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (U32.v pos_out1) (U32.v pos_out1 + (U32.v pos2 - U32.v pos1));
let pos_out2 = copy_strong p sl pos1 pos2 sl_out pos_out1 in
B.upd bpos_out' 0ul pos_out2;
let h' = HST.get () in
writable_modifies sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (B.loc_region_only true (HS.get_tip h1)) h';
valid_list_nil p h' sl_out pos_out2;
valid_list_cons p h' sl_out pos_out1 pos_out2;
valid_list_append p h' sl_out pos_out pos_out1 pos_out2
end else
L.append_l_nil (L.filter f (G.reveal l1))
)
;
let pos_out' = B.index bpos_out' 0ul in
HST.pop_frame ();
pos_out'
#pop-options
#push-options "--z3rlimit 64 --fuel 2 --ifuel 1"
inline_for_extraction
let list_nth
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(i: U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos' /\
U32.v i < L.length (contents_list p h sl pos pos')
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
valid_list p h sl pos res /\
valid p h sl res /\
valid_list p h sl (get_valid_pos p h sl res) pos' /\
L.length (contents_list p h sl pos res) == U32.v i /\
contents p h sl res == L.index (contents_list p h sl pos pos') (U32.v i)
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos1 = BF.alloca pos 1ul in
let bk = BF.alloca 0ul 1ul in
let h2 = HST.get () in
valid_list_nil p h0 sl pos;
let _ : bool = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
let k = Seq.index (B.as_seq h bk) 0 in
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bpos1 /\
B.live h bk /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v k /\
U32.v k <= U32.v i
)
(fun h _ _ _ h' ->
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bpos1);
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bk);
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
let pos1 = Seq.index (B.as_seq h bpos1) 0 in
B.live h bpos1 /\
valid p h0 sl pos1 /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl (get_valid_pos p h0 sl pos1) pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v i /\
contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i)
)
(fun _ _ ->
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 ->
let k = B.index bk 0ul in
if k = i
then begin
B.upd bpos1 0ul pos1;
valid_list_cons_recip p h0 sl pos1 pos';
list_index_append (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') (U32.v i);
valid_list_append p h0 sl pos pos1 pos' ;
assert (contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i));
false
end else begin
B.upd bk 0ul (k `U32.add` 1ul);
let h = HST.get () in
B.modifies_only_not_unused_in B.loc_none h0 h;
valid_list_snoc p h0 sl pos pos1;
assert (valid p h0 sl pos1);
assert (pos2 == get_valid_pos p h0 sl pos1);
assert (valid_list p h0 sl pos pos2);
list_length_append (contents_list p h0 sl pos pos1) [contents p h0 sl pos1];
true
end
)
in
let res = B.index bpos1 0ul in
HST.pop_frame ();
res
inline_for_extraction
let list_find
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let l = contents_list p h sl pos pos' in
if res = pos'
then L.find f l == None
else
U32.v pos <= U32.v res /\
valid p h sl res /\ (
let x = contents p h sl res in
U32.v res + content_length p h sl res <= U32.v pos' /\
f x == true /\
L.find f l == Some x
)
)))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bres = BF.alloca 0ul 1ul in
let h2 = HST.get () in
let not_found = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\
valid_list p h0 sl pos1 pos' /\
l2 == contents_list p h0 sl pos1 pos' /\
L.find f (contents_list p h0 sl pos pos') == L.find f l2
)
(fun h _ _ _ h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\ (
let res = Seq.index (B.as_seq h bres) 0 in
U32.v pos <= U32.v res /\
valid p h0 sl res /\ (
let x = contents p h0 sl res in
U32.v res + content_length p h0 sl res <= U32.v pos' /\
f x == true /\
L.find f (contents_list p h0 sl pos pos') == Some x
)))
(fun h h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun pos1 pos2 ->
if f' sl pos1
then begin
B.upd bres 0ul pos1;
false
end
else true
)
in
let res =
if not_found
then pos'
else B.index bres 0ul
in
HST.pop_frame ();
res
#pop-options
let rec list_existsb_find
(#a: Type)
(f: (a -> Tot bool))
(l: list a)
: Lemma
(L.existsb f l == Some? (L.find f l))
= match l with
| [] -> ()
| x :: q ->
if f x
then ()
else list_existsb_find f q
inline_for_extraction
noextract
let list_existsb
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack bool
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == L.existsb f (contents_list p h sl pos pos')
))
= let h = HST.get () in
list_existsb_find f (contents_list p h sl pos pos');
let posn = list_find j f f' sl pos pos' in
posn <> pos'
#push-options "--fuel 2 --ifuel 1 --z3rlimit 256 --query_stats"
inline_for_extraction
noextract
let list_flatten_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot (list t2))) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2_ res /\
contents_list p2 h' sl2 pos2_ res == y
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1')) in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= let hz = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos2_ = BF.alloca pos2 1ul in
let h2 = HST.get () in
valid_list_nil p2 hz sl2 pos2;
let fits = list_fold_left_gen
p1
j1
sl1
pos1 pos1'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2))
(fun h ll lr _ ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
B.live h bpos2_ /\ (
let pos2_ = Seq.index (B.as_seq h bpos2_) 0 in
contents_list p1 h0 sl1 pos1 pos1' == ll `List.Tot.append` lr /\
valid_list p2 h sl2 pos2 pos2_ /\
contents_list p2 h sl2 pos2 pos2_ == List.Tot.flatten (List.Tot.map f ll) /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(fun h _ _ _ h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
U32.v pos2 + serialized_list_length s2 (List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1'))) > U32.v sl2.len
)
(fun h h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1l pos1r ->
let pos2_ = B.index bpos2_ 0ul in
let h = HST.get () in
writable_weaken sl2.base (U32.v pos2) (U32.v sl2.len) h (U32.v pos2_) (U32.v sl2.len);
valid_pos_frame_strong p1 h0 sl1 pos1l pos1r (loc_slice_from sl2 pos2) hz;
let res = f' pos1l pos2_ in
let fits = not (res = max_uint32) in
if fits then begin
B.upd bpos2_ 0ul res;
let h' = HST.get () in
writable_modifies sl2.base (U32.v pos2) (U32.v sl2.len) h (B.loc_region_only true (HS.get_tip h1)) h' ;
List.Tot.append_assoc (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l] (contents_list p1 h0 sl1 pos1r pos1');
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l];
valid_list_snoc p1 h0 sl1 pos1 pos1l;
valid_list_append p2 h' sl2 pos2 pos2_ res;
valid_list_nil p2 h' sl2 res;
valid_list_append p2 h' sl2 pos2_ res res
end else begin
let h' = HST.get () in
valid_list_cons p1 h0 sl1 pos1l pos1' ;
valid_list_append p1 h0 sl1 pos1 pos1l pos1' ;
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) (contents_list p1 h0 sl1 pos1l pos1');
serialized_list_length_append s2 (L.flatten (L.map f (contents_list p1 h0 sl1 pos1 pos1l))) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1l pos1')));
serialized_list_length_append s2 (f (contents p1 h0 sl1 pos1l)) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1r pos1')));
valid_list_serialized_list_length s2 h' sl2 pos2 pos2_
end;
fits
)
in
let res =
if fits
then B.index bpos2_ 0ul
else max_uint32
in
HST.pop_frame ();
res
#pop-options
#push-options "--z3rlimit 16 --query_stats"
inline_for_extraction
noextract
let list_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot t2)) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_length s2 y > U32.v sl2.len
else
valid_content_pos p2 h' sl2 pos2_ y res
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1') in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= list_map_list_flatten_map f (contents_list p1 h0 sl1 pos1 pos1');
list_flatten_map
j1
s2
(fun x -> [f x])
h0
sl1 pos1 pos1'
sl2 pos2
(fun pos1 pos2 ->
let res = f' pos1 pos2 in
let h = HST.get () in
if res = max_uint32
then begin
serialized_list_length_nil s2;
serialized_list_length_cons s2 (f (contents p1 h0 sl1 pos1)) []
end
else begin
valid_list_nil p2 h sl2 res;
valid_list_cons p2 h sl2 pos2 res
end;
res
)
(* Example: trivial printers *)
inline_for_extraction
let print_list
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(print: ((#rrel: _) -> (#rel: _) -> (sl: slice rrel rel) -> (pos: U32.t) -> HST.Stack unit (requires (fun h -> valid p h sl pos)) (ensures (fun h _ h' -> B.modifies B.loc_none h h'))))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack unit
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h _ h' ->
B.modifies B.loc_none h h'
))
= let h0 = HST.get () in
list_fold_left
p
j
sl
pos pos'
h0
(Ghost.hide B.loc_none)
(fun _ _ _ _ -> True)
(fun _ _ _ _ _ -> ())
(fun pos1 _ _ _ _ ->
print sl pos1
)
(* Monotonicity *)
inline_for_extraction
let compl_t (t: Type) = U32.t -> t -> U32.t -> Tot (B.spred byte)
let wvalid
(#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos' : Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
: GTot prop
=
U32.v pos <= U32.v (Ghost.reveal gpos') /\
U32.v (Ghost.reveal gpos') <= U32.v s.len /\
U32.v s.len <= Seq.length x /\
parse p (Seq.slice x (U32.v pos) (U32.v s.len)) == Some (Ghost.reveal gv, U32.v (Ghost.reveal gpos') - U32.v pos) /\
compl pos (Ghost.reveal gv) (Ghost.reveal gpos') x
let wvalid_valid_content_pos
(#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos' : Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
(h: HS.mem)
: Lemma
(requires (
wvalid p s compl pos gpos' gv x /\
live_slice h s /\
x == B.as_seq h s.base
))
(ensures (
valid_content_pos p h s pos gv gpos'
))
=
valid_facts p h s pos
inline_for_extraction
noeq
type irepr (#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel) (compl: compl_t t) =
| IRepr:
(pos: U32.t) ->
(gpos' : Ghost.erased U32.t) ->
(gv: Ghost.erased t) ->
(irepr_correct: squash (
U32.v pos <= U32.v (Ghost.reveal gpos') /\
U32.v (Ghost.reveal gpos') <= U32.v s.len /\
B.witnessed s.base (wvalid p s compl pos gpos' gv)
)) ->
irepr p s compl
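(* An [irepr p s compl] packages a position [pos] in [s] together with the
   ghost end position and ghost parsed value, plus a witness that the stable
   predicate [wvalid p s compl pos gpos' gv] has been recorded on [s.base],
   so validity can later be recalled without reparsing. *)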
inline_for_extraction
let irepr_pos
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : Tot U32.t =
IRepr?.pos x
let irepr_pos'
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : Ghost U32.t
(requires True)
(ensures (fun y -> True))
= Ghost.reveal (IRepr?.gpos' x)
#push-options "--ifuel 1 --fuel 2"
let irepr_pos'_post
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : Lemma
(requires True)
(ensures (
let y = irepr_pos' x in
U32.v (irepr_pos x) <= U32.v y /\ U32.v y <= U32.v s.len
))
[SMTPat (irepr_pos' x)]
= ()
let irepr_v | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val irepr_v
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(x: irepr p s compl)
: GTot t | [] | LowParse.Low.Base.irepr_v | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | x: LowParse.Low.Base.irepr p s compl -> Prims.GTot t | {
"end_col": 28,
"end_line": 2084,
"start_col": 2,
"start_line": 2084
} |
Prims.Tot | val irepr_pos
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(x: irepr p s compl)
: Tot U32.t | [
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "BF"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let irepr_pos
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : Tot U32.t =
IRepr?.pos x | val irepr_pos
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(x: irepr p s compl)
: Tot U32.t
let irepr_pos
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(x: irepr p s compl)
: Tot U32.t = | false | null | false | IRepr?.pos x | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"LowParse.Low.Base.compl_t",
"LowParse.Low.Base.irepr",
"LowParse.Low.Base.__proj__IRepr__item__pos",
"FStar.UInt32.t"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
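(* Editorial note (added): a validator checks at run time whether the bytes at
   pos parse with p; on success it returns the end position of the value, and
   on failure a result for which is_success is false. Positions are threaded
   as U64.t so that failure information can be encoded above the 32-bit
   position range (see LowParse.Low.ErrorCode, included above). *)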
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
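(* Editorial note (added): a jumper is the validity-assuming counterpart of a
   validator: given a position already known to be valid, it only computes the
   end position of the value, so it can use the narrower U32.t positions and
   needs no error case. *)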
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
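(* Editorial note (added): writable b pos pos' h says that the preorder rel of
   b puts no constraint on the contents of the range [pos, pos'): any two
   replacements of that sub-range are related, so the range may be overwritten
   freely (e.g. by mbuffer_upd or the copy functions below) while preserving
   the buffer's preorder. *)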
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h'
#push-options "--z3rlimit 16"
inline_for_extraction
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos
))
= let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h0 (Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@inline_let] let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos'
#pop-options
inline_for_extraction
let copy_strong'
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\ (
let clen = content_length p h src spos in
U32.v dpos + clen <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + clen) h /\
B.loc_disjoint (loc_slice_from src spos) (loc_slice_from_to dst dpos (dpos `U32.add` (U32.uint_to_t clen)))
)))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
))
= let spos' = j src spos in
copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= if (dst.len `U32.sub` dpos) `U32.lt` (spos' `U32.sub` spos)
then max_uint32
else copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(jmp: jumper p)
(src: slice rrel1 rel1)
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (content_length p h src spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos (get_valid_pos p h src spos)) (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= let spos' = jmp src spos in
copy_weak_with_length p src spos spos' dst dpos
(* fold_left on lists *)
module BF = LowStar.Buffer
#push-options "--z3rlimit 256 --fuel 1 --ifuel 1"
#restart-solver
inline_for_extraction
let list_fold_left_gen
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(post_interrupt: ((h: HS.mem) -> GTot Type0))
(post_interrupt_frame: (h: HS.mem) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
post_interrupt h
)) (ensures (post_interrupt h')))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
HST.Stack bool
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos1 /\
valid_pos p h0 sl pos1 pos2 /\
valid_list p h0 sl pos2 pos' /\
inv h (contents_list p h0 sl pos pos1) (contents p h0 sl pos1 :: contents_list p h0 sl pos2 pos') pos1
))
(ensures (fun h ctinue h' ->
B.modifies (Ghost.reveal l) h h' /\
(if ctinue then inv h' (contents_list p h0 sl pos pos1 `L.append` [contents p h0 sl pos1]) (contents_list p h0 sl pos2 pos') pos2 else post_interrupt h')
))
))
: HST.Stack bool
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h res h' ->
B.modifies (Ghost.reveal l) h h' /\
(if res then inv h' (contents_list p h sl pos pos') [] pos' else post_interrupt h')
))
= HST.push_frame ();
let h1 = HST.get () in
// B.fresh_frame_modifies h0 h1;
let bpos : BF.pointer U32.t = BF.alloca pos 1ul in
let bctinue : BF.pointer bool = BF.alloca true 1ul in
let btest: BF.pointer bool = BF.alloca (pos `U32.lt` pos') 1ul in
let h2 = HST.get () in
assert (B.modifies B.loc_none h0 h2);
let test_pre (h: HS.mem) : GTot Type0 =
B.live h bpos /\ B.live h bctinue /\ B.live h btest /\ (
let pos1 = Seq.index (B.as_seq h bpos) 0 in
let ctinue = Seq.index (B.as_seq h bctinue) 0 in
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
B.modifies (Ghost.reveal l `B.loc_union` B.loc_region_only true (HS.get_tip h1)) h2 h /\
Seq.index (B.as_seq h btest) 0 == ((U32.v (Seq.index (B.as_seq h bpos) 0) < U32.v pos') && Seq.index (B.as_seq h bctinue) 0) /\
(if ctinue then inv h (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 else post_interrupt h)
)
in
let test_post (cond: bool) (h: HS.mem) : GTot Type0 =
test_pre h /\
cond == Seq.index (B.as_seq h btest) 0
in
valid_list_nil p h0 sl pos;
inv_frame h0 [] (contents_list p h0 sl pos pos') pos h1;
inv_frame h1 [] (contents_list p h0 sl pos pos') pos h2;
[@inline_let]
let while_body () : HST.Stack unit
(requires (fun h -> test_post true h))
(ensures (fun _ _ h1 -> test_pre h1))
=
let h51 = HST.get () in
let pos1 = B.index bpos 0ul in
valid_list_cons_recip p h0 sl pos1 pos';
//assert (B.modifies (Ghost.reveal l `B.loc_union` B.loc_buffer bpos) h0 h51);
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos'));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos pos1));
valid_list_cons_recip p h51 sl pos1 pos';
let pos2 = j sl pos1 in
let h52 = HST.get () in
inv_frame h51 (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 h52;
B.modifies_only_not_unused_in (Ghost.reveal l) h0 h52;
let ctinue = body pos1 pos2 in
let h53 = HST.get () in
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos2 pos'));
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h1));
valid_pos_frame_strong p h0 sl pos1 pos2 (Ghost.reveal l) h53;
valid_list_snoc p h0 sl pos pos1;
B.upd bpos 0ul pos2;
B.upd bctinue 0ul ctinue;
B.upd btest 0ul ((pos2 `U32.lt` pos') && ctinue);
let h54 = HST.get () in
[@inline_let]
let _ =
if ctinue
then inv_frame h53 (contents_list p h0 sl pos pos2) (contents_list p h0 sl pos2 pos') pos2 h54
else post_interrupt_frame h53 h54
in
()
in
C.Loops.while
#test_pre
#test_post
(fun (_: unit) -> (
B.index btest 0ul) <: HST.Stack bool (requires (fun h -> test_pre h)) (ensures (fun h x h1 -> test_post x h1)))
while_body
;
valid_list_nil p h0 sl pos';
let res = B.index bctinue 0ul in
let h3 = HST.get () in
HST.pop_frame ();
let h4 = HST.get () in
//B.popped_modifies h3 h4;
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h3));
[@inline_let]
let _ =
if res
then inv_frame h3 (contents_list p h0 sl pos pos') [] pos' h4
else post_interrupt_frame h3 h4
in
res
#pop-options
//B.loc_includes_union_l (B.loc_all_regions_from false (HS.get_tip h1)) (Ghost.reveal l) (Ghost.reveal l)
//B.modifies_fresh_frame_popped h0 h1 (Ghost.reveal l) h3 h4
module G = FStar.Ghost
inline_for_extraction
let list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
(l1: Ghost.erased (list t)) ->
(x: Ghost.erased t) ->
(l2: Ghost.erased (list t)) ->
HST.Stack unit
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\
U32.v pos <= U32.v pos1 /\ U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' == Ghost.reveal l1 `L.append` (Ghost.reveal x :: Ghost.reveal l2)
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (Ghost.reveal l1 `L.append` [contents p h0 sl pos1]) (Ghost.reveal l2) pos2
))
))
: HST.Stack unit
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (contents_list p h sl pos pos') [] pos'
))
= let _ = list_fold_left_gen
p
j
sl
pos pos'
h0
l
inv
inv_frame
(fun _ -> False)
(fun _ _ -> ())
(fun pos1 pos2 ->
let h = HST.get () in
valid_list_cons p h sl pos1 pos';
valid_list_append p h sl pos pos1 pos';
body
pos1
pos2
(Ghost.hide (contents_list p h sl pos pos1))
(Ghost.hide (contents p h sl pos1))
(Ghost.hide (contents_list p h sl pos2 pos'))
;
true
)
in
()
inline_for_extraction
let list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v res == L.length (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
B.fresh_frame_modifies h0 h1;
let blen : BF.pointer U32.t = BF.alloca 0ul 1ul in
let h2 = HST.get () in
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer blen))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_buffer blen) h2 h /\
B.live h blen /\ (
let len = U32.v (Seq.index (B.as_seq h blen) 0) in
len <= U32.v pos1 /\ // necessary to prove that length computations do not overflow
len == L.length l1
))
(fun h l1 l2 pos1 h' ->
B.modifies_only_not_unused_in (B.loc_buffer blen) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
B.upd blen 0ul (B.index blen 0ul `U32.add` 1ul);
Classical.forall_intro_2 (list_length_append #t)
)
;
let len = B.index blen 0ul in
HST.pop_frame ();
len
#push-options "--z3rlimit 32 --fuel 2 --ifuel 1"
inline_for_extraction
let list_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(f: (t -> Tot bool))
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
(x: Ghost.erased t) ->
HST.Stack bool
(requires (fun h -> valid_content p h sl pos (G.reveal x)))
(ensures (fun h res h' -> B.modifies B.loc_none h h' /\ res == f (G.reveal x)))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
(#rrel_out #rel_out: _)
(sl_out : slice rrel_out rel_out)
(pos_out : U32.t)
: HST.Stack U32.t
(requires (fun h ->
U32.v pos_out + U32.v pos' - U32.v pos <= U32.v sl_out.len /\
valid_list p h sl pos pos' /\
B.loc_disjoint (loc_slice_from_to sl pos pos') (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
live_slice h sl_out
))
(ensures (fun h pos_out' h' ->
B.modifies (loc_slice_from_to sl_out pos_out pos_out') h h' /\
U32.v pos_out' - U32.v pos_out <= U32.v pos' - U32.v pos /\
valid_list p h' sl_out pos_out pos_out' /\
contents_list p h' sl_out pos_out pos_out' == L.filter f (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
//B.fresh_frame_modifies h0 h1;
let bpos_out' : BF.pointer U32.t = BF.alloca pos_out 1ul in
let h2 = HST.get () in
let inv (h: HS.mem) (l1 l2: list t) (pos1: U32.t) : GTot Type0 =
B.live h bpos_out' /\ (
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
valid_list p h sl_out pos_out pos_out' /\
contents_list p h sl_out pos_out pos_out' == L.filter f l1 /\
U32.v pos_out' - U32.v pos1 <= U32.v pos_out - U32.v pos // necessary to prove that length computations do not overflow
)
in
valid_list_nil p h2 sl_out pos_out;
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))))
inv
(fun h l1 l2 pos1 h' ->
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies_only_not_unused_in (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
let pos_out1 = B.index bpos_out' 0ul in
list_filter_append f (G.reveal l1) [G.reveal x];
if f' sl pos1 x
then begin
assert (B.loc_includes (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) (loc_slice_from_to sl_out pos_out1 (pos_out1 `U32.add` (pos2 `U32.sub` pos1))));
let h = HST.get () in
writable_weaken sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (U32.v pos_out1) (U32.v pos_out1 + (U32.v pos2 - U32.v pos1));
let pos_out2 = copy_strong p sl pos1 pos2 sl_out pos_out1 in
B.upd bpos_out' 0ul pos_out2;
let h' = HST.get () in
writable_modifies sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (B.loc_region_only true (HS.get_tip h1)) h';
valid_list_nil p h' sl_out pos_out2;
valid_list_cons p h' sl_out pos_out1 pos_out2;
valid_list_append p h' sl_out pos_out pos_out1 pos_out2
end else
L.append_l_nil (L.filter f (G.reveal l1))
)
;
let pos_out' = B.index bpos_out' 0ul in
HST.pop_frame ();
pos_out'
#pop-options
#push-options "--z3rlimit 64 --fuel 2 --ifuel 1"
inline_for_extraction
let list_nth
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(i: U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos' /\
U32.v i < L.length (contents_list p h sl pos pos')
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
valid_list p h sl pos res /\
valid p h sl res /\
valid_list p h sl (get_valid_pos p h sl res) pos' /\
L.length (contents_list p h sl pos res) == U32.v i /\
contents p h sl res == L.index (contents_list p h sl pos pos') (U32.v i)
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos1 = BF.alloca pos 1ul in
let bk = BF.alloca 0ul 1ul in
let h2 = HST.get () in
valid_list_nil p h0 sl pos;
let _ : bool = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
let k = Seq.index (B.as_seq h bk) 0 in
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bpos1 /\
B.live h bk /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v k /\
U32.v k <= U32.v i
)
(fun h _ _ _ h' ->
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bpos1);
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bk);
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
let pos1 = Seq.index (B.as_seq h bpos1) 0 in
B.live h bpos1 /\
valid p h0 sl pos1 /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl (get_valid_pos p h0 sl pos1) pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v i /\
contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i)
)
(fun _ _ ->
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 ->
let k = B.index bk 0ul in
if k = i
then begin
B.upd bpos1 0ul pos1;
valid_list_cons_recip p h0 sl pos1 pos';
list_index_append (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') (U32.v i);
valid_list_append p h0 sl pos pos1 pos' ;
assert (contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i));
false
end else begin
B.upd bk 0ul (k `U32.add` 1ul);
let h = HST.get () in
B.modifies_only_not_unused_in B.loc_none h0 h;
valid_list_snoc p h0 sl pos pos1;
assert (valid p h0 sl pos1);
assert (pos2 == get_valid_pos p h0 sl pos1);
assert (valid_list p h0 sl pos pos2);
list_length_append (contents_list p h0 sl pos pos1) [contents p h0 sl pos1];
true
end
)
in
let res = B.index bpos1 0ul in
HST.pop_frame ();
res
inline_for_extraction
let list_find
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let l = contents_list p h sl pos pos' in
if res = pos'
then L.find f l == None
else
U32.v pos <= U32.v res /\
valid p h sl res /\ (
let x = contents p h sl res in
U32.v res + content_length p h sl res <= U32.v pos' /\
f x == true /\
L.find f l == Some x
)
)))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bres = BF.alloca 0ul 1ul in
let h2 = HST.get () in
let not_found = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\
valid_list p h0 sl pos1 pos' /\
l2 == contents_list p h0 sl pos1 pos' /\
L.find f (contents_list p h0 sl pos pos') == L.find f l2
)
(fun h _ _ _ h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\ (
let res = Seq.index (B.as_seq h bres) 0 in
U32.v pos <= U32.v res /\
valid p h0 sl res /\ (
let x = contents p h0 sl res in
U32.v res + content_length p h0 sl res <= U32.v pos' /\
f x == true /\
L.find f (contents_list p h0 sl pos pos') == Some x
)))
(fun h h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun pos1 pos2 ->
if f' sl pos1
then begin
B.upd bres 0ul pos1;
false
end
else true
)
in
let res =
if not_found
then pos'
else B.index bres 0ul
in
HST.pop_frame ();
res
#pop-options
let rec list_existsb_find
(#a: Type)
(f: (a -> Tot bool))
(l: list a)
: Lemma
(L.existsb f l == Some? (L.find f l))
= match l with
| [] -> ()
| x :: q ->
if f x
then ()
else list_existsb_find f q
inline_for_extraction
noextract
let list_existsb
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack bool
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == L.existsb f (contents_list p h sl pos pos')
))
= let h = HST.get () in
list_existsb_find f (contents_list p h sl pos pos');
let posn = list_find j f f' sl pos pos' in
posn <> pos'
#push-options "--fuel 2 --ifuel 1 --z3rlimit 256 --query_stats"
inline_for_extraction
noextract
let list_flatten_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot (list t2))) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2_ res /\
contents_list p2 h' sl2 pos2_ res == y
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1')) in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= let hz = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos2_ = BF.alloca pos2 1ul in
let h2 = HST.get () in
valid_list_nil p2 hz sl2 pos2;
let fits = list_fold_left_gen
p1
j1
sl1
pos1 pos1'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2))
(fun h ll lr _ ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
B.live h bpos2_ /\ (
let pos2_ = Seq.index (B.as_seq h bpos2_) 0 in
contents_list p1 h0 sl1 pos1 pos1' == ll `List.Tot.append` lr /\
valid_list p2 h sl2 pos2 pos2_ /\
contents_list p2 h sl2 pos2 pos2_ == List.Tot.flatten (List.Tot.map f ll) /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(fun h _ _ _ h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
U32.v pos2 + serialized_list_length s2 (List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1'))) > U32.v sl2.len
)
(fun h h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1l pos1r ->
let pos2_ = B.index bpos2_ 0ul in
let h = HST.get () in
writable_weaken sl2.base (U32.v pos2) (U32.v sl2.len) h (U32.v pos2_) (U32.v sl2.len);
valid_pos_frame_strong p1 h0 sl1 pos1l pos1r (loc_slice_from sl2 pos2) hz;
let res = f' pos1l pos2_ in
let fits = not (res = max_uint32) in
if fits then begin
B.upd bpos2_ 0ul res;
let h' = HST.get () in
writable_modifies sl2.base (U32.v pos2) (U32.v sl2.len) h (B.loc_region_only true (HS.get_tip h1)) h' ;
List.Tot.append_assoc (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l] (contents_list p1 h0 sl1 pos1r pos1');
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l];
valid_list_snoc p1 h0 sl1 pos1 pos1l;
valid_list_append p2 h' sl2 pos2 pos2_ res;
valid_list_nil p2 h' sl2 res;
valid_list_append p2 h' sl2 pos2_ res res
end else begin
let h' = HST.get () in
valid_list_cons p1 h0 sl1 pos1l pos1' ;
valid_list_append p1 h0 sl1 pos1 pos1l pos1' ;
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) (contents_list p1 h0 sl1 pos1l pos1');
serialized_list_length_append s2 (L.flatten (L.map f (contents_list p1 h0 sl1 pos1 pos1l))) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1l pos1')));
serialized_list_length_append s2 (f (contents p1 h0 sl1 pos1l)) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1r pos1')));
valid_list_serialized_list_length s2 h' sl2 pos2 pos2_
end;
fits
)
in
let res =
if fits
then B.index bpos2_ 0ul
else max_uint32
in
HST.pop_frame ();
res
#pop-options
#push-options "--z3rlimit 16 --query_stats"
inline_for_extraction
noextract
let list_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot t2)) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_length s2 y > U32.v sl2.len
else
valid_content_pos p2 h' sl2 pos2_ y res
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1') in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= list_map_list_flatten_map f (contents_list p1 h0 sl1 pos1 pos1');
list_flatten_map
j1
s2
(fun x -> [f x])
h0
sl1 pos1 pos1'
sl2 pos2
(fun pos1 pos2 ->
let res = f' pos1 pos2 in
let h = HST.get () in
if res = max_uint32
then begin
serialized_list_length_nil s2;
serialized_list_length_cons s2 (f (contents p1 h0 sl1 pos1)) []
end
else begin
valid_list_nil p2 h sl2 res;
valid_list_cons p2 h sl2 pos2 res
end;
res
)
(* Example: trivial printers *)
inline_for_extraction
let print_list
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(print: ((#rrel: _) -> (#rel: _) -> (sl: slice rrel rel) -> (pos: U32.t) -> HST.Stack unit (requires (fun h -> valid p h sl pos)) (ensures (fun h _ h' -> B.modifies B.loc_none h h'))))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack unit
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h _ h' ->
B.modifies B.loc_none h h'
))
= let h0 = HST.get () in
list_fold_left
p
j
sl
pos pos'
h0
(Ghost.hide B.loc_none)
(fun _ _ _ _ -> True)
(fun _ _ _ _ _ -> ())
(fun pos1 _ _ _ _ ->
print sl pos1
)
(* Monotonicity *)
inline_for_extraction
let compl_t (t: Type) = U32.t -> t -> U32.t -> Tot (B.spred byte)
let wvalid
(#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos' : Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
: GTot prop
=
U32.v pos <= U32.v (Ghost.reveal gpos') /\
U32.v (Ghost.reveal gpos') <= U32.v s.len /\
U32.v s.len <= Seq.length x /\
parse p (Seq.slice x (U32.v pos) (U32.v s.len)) == Some (Ghost.reveal gv, U32.v (Ghost.reveal gpos') - U32.v pos) /\
compl pos (Ghost.reveal gv) (Ghost.reveal gpos') x
let wvalid_valid_content_pos
(#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos' : Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
(h: HS.mem)
: Lemma
(requires (
wvalid p s compl pos gpos' gv x /\
live_slice h s /\
x == B.as_seq h s.base
))
(ensures (
valid_content_pos p h s pos gv gpos'
))
=
valid_facts p h s pos
inline_for_extraction
noeq
type irepr (#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel) (compl: compl_t t) =
| IRepr:
(pos: U32.t) ->
(gpos' : Ghost.erased U32.t) ->
(gv: Ghost.erased t) ->
(irepr_correct: squash (
U32.v pos <= U32.v (Ghost.reveal gpos') /\
U32.v (Ghost.reveal gpos') <= U32.v s.len /\
B.witnessed s.base (wvalid p s compl pos gpos' gv)
)) ->
irepr p s compl
inline_for_extraction
let irepr_pos | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val irepr_pos
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(x: irepr p s compl)
: Tot U32.t | [] | LowParse.Low.Base.irepr_pos | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | x: LowParse.Low.Base.irepr p s compl -> FStar.UInt32.t | {
"end_col": 14,
"end_line": 2062,
"start_col": 2,
"start_line": 2062
} |
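A minimal sanity check over the `wvalid` predicate defined in the file context above (the lemma name is hypothetical and not part of the LowParse sources): the conjunction recorded by `wvalid` already bounds the position by the slice length and the slice length by the byte sequence, so these bounds can be recovered without mentioning any memory state.

let wvalid_pos_bound
  (#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel)
  (compl: compl_t t)
  (pos: U32.t)
  (gpos' : Ghost.erased U32.t)
  (gv: Ghost.erased t)
  (x: Seq.seq byte)
: Lemma
  (requires (wvalid p s compl pos gpos' gv x))  (* the witnessed-validity predicate from above *)
  (ensures (U32.v pos <= U32.v s.len /\ U32.v s.len <= Seq.length x))
= ()  (* both bounds are direct consequences of the conjuncts of wvalid *)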
FStar.HyperStack.ST.Stack | val comment (s: string)
: HST.Stack unit (requires (fun _ -> True)) (ensures (fun h _ h' -> h == h')) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s | val comment (s: string)
: HST.Stack unit (requires (fun _ -> True)) (ensures (fun h _ h' -> h == h'))
let comment (s: string)
: HST.Stack unit (requires (fun _ -> True)) (ensures (fun h _ h' -> h == h')) = | true | null | false | LowStar.Comment.comment s | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [] | [
"Prims.string",
"LowStar.Comment.comment",
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_True",
"Prims.eq2"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True)) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val comment (s: string)
: HST.Stack unit (requires (fun _ -> True)) (ensures (fun h _ h' -> h == h')) | [] | LowParse.Low.Base.comment | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | s: Prims.string -> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 27,
"end_line": 215,
"start_col": 2,
"start_line": 215
} |
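A minimal usage sketch for `comment` / `validate_with_comment` (the wrapper name and comment string are hypothetical, not from the LowParse sources): any existing validator can be annotated so that the extracted C code carries a comment, without changing the validator's specification.

inline_for_extraction
noextract
let validate_with_field_note
  (#k: parser_kind) (#t: Type) (#p: parser k t)
  (v: validator p)
: Tot (validator p)
= (* same pre/postconditions as v; only the extracted code gains a comment *)
  validate_with_comment "validating the next field" v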
Prims.Tot | val validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u:
unit
{ k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal })
: Tot (validator p) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u) | val validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u:
unit
{ k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal })
: Tot (validator p)
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u:
unit
{ k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal })
: Tot (validator p) = | false | null | false | validate_no_read (validate_total_constant_size_no_read p sz u) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"FStar.UInt64.t",
"Prims.unit",
"Prims.l_and",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"Prims.nat",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low",
"Prims.int",
"Prims.l_or",
"Prims.b2t",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt.size",
"FStar.UInt64.n",
"FStar.UInt64.v",
"LowParse.Spec.Base.parser_kind_metadata_some",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_metadata",
"LowParse.Spec.Base.ParserKindMetadataTotal",
"LowParse.Low.Base.validate_no_read",
"LowParse.Low.Base.validate_total_constant_size_no_read",
"LowParse.Low.Base.validator"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
}) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u:
unit
{ k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal })
: Tot (validator p) | [] | LowParse.Low.Base.validate_total_constant_size | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
p: LowParse.Spec.Base.parser k t ->
sz: FStar.UInt64.t ->
u99:
u100:
Prims.unit
{ Mkparser_kind'?.parser_kind_high k ==
FStar.Pervasives.Native.Some (Mkparser_kind'?.parser_kind_low k) /\
Mkparser_kind'?.parser_kind_low k == FStar.UInt64.v sz /\
Mkparser_kind'?.parser_kind_metadata k ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserKindMetadataTotal }
-> LowParse.Low.Base.validator p | {
"end_col": 64,
"end_line": 319,
"start_col": 2,
"start_line": 319
} |
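A usage sketch for `validate_total_constant_size` under an assumed parser (both `quad_kind` and `parse_quad` are hypothetical `assume val`s, introduced only for illustration): once the kind promises an exact, total size, the refinement on the `u` argument is discharged from the kind alone and validation reduces to a single bounds check.

(* a 4-byte, total, constant-size parser, assumed for illustration only *)
assume val quad_kind : (k: parser_kind {
  k.parser_kind_high == Some k.parser_kind_low /\
  k.parser_kind_low == 4 /\
  k.parser_kind_metadata == Some ParserKindMetadataTotal })

assume val parse_quad : parser quad_kind U32.t

inline_for_extraction
let validate_quad : validator parse_quad
= validate_total_constant_size parse_quad 4uL ()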
Prims.Tot | val validate_no_read (#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator_no_read p)
: Tot (validator p) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos | val validate_no_read (#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator_no_read p)
: Tot (validator p)
let validate_no_read (#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator_no_read p)
: Tot (validator p) = | false | null | false | fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Low.Base.validator_no_read",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt64.t",
"FStar.Ghost.hide",
"LowParse.Slice.__proj__Mkslice__item__len",
"LowParse.Low.Base.validator"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val validate_no_read (#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator_no_read p)
: Tot (validator p) | [] | LowParse.Low.Base.validate_no_read | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | v: LowParse.Low.Base.validator_no_read p -> LowParse.Low.Base.validator p | {
"end_col": 55,
"end_line": 208,
"start_col": 2,
"start_line": 208
} |
Prims.Tot | val validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u:
unit
{ k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal })
: Tot (validator_no_read p) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz) | val validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u:
unit
{ k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal })
: Tot (validator_no_read p)
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u:
unit
{ k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal })
: Tot (validator_no_read p) = | false | null | false | fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@@ inline_let ]let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt ((Cast.uint32_to_uint64 len) `U64.sub` pos) sz
then validator_error_not_enough_data
else (pos `U64.add` sz) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"FStar.UInt64.t",
"Prims.unit",
"Prims.l_and",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"Prims.nat",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low",
"Prims.int",
"Prims.l_or",
"Prims.b2t",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt.size",
"FStar.UInt64.n",
"FStar.UInt64.v",
"LowParse.Spec.Base.parser_kind_metadata_some",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_metadata",
"LowParse.Spec.Base.ParserKindMetadataTotal",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"FStar.Ghost.erased",
"LowParse.Slice.slice",
"FStar.UInt32.t",
"LowParse.Slice.__proj__Mkslice__item__len",
"FStar.Ghost.reveal",
"FStar.UInt64.lt",
"FStar.UInt64.sub",
"FStar.Int.Cast.uint32_to_uint64",
"LowParse.Low.ErrorCode.validator_error_not_enough_data",
"Prims.bool",
"FStar.UInt64.add",
"LowParse.Low.Base.valid_total_constant_size",
"LowParse.Low.ErrorCode.uint64_to_uint32",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowParse.Low.Base.validator_no_read"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
}) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u:
unit
{ k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal })
: Tot (validator_no_read p) | [] | LowParse.Low.Base.validate_total_constant_size_no_read | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
p: LowParse.Spec.Base.parser k t ->
sz: FStar.UInt64.t ->
u94:
u95:
Prims.unit
{ Mkparser_kind'?.parser_kind_high k ==
FStar.Pervasives.Native.Some (Mkparser_kind'?.parser_kind_low k) /\
Mkparser_kind'?.parser_kind_low k == FStar.UInt64.v sz /\
Mkparser_kind'?.parser_kind_metadata k ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserKindMetadataTotal }
-> LowParse.Low.Base.validator_no_read p | {
"end_col": 22,
"end_line": 305,
"start_col": 2,
"start_line": 299
} |
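The no-read variant receives the slice only as a ghost argument plus its concrete length, so the extracted code never dereferences the buffer contents. Composing it with `validate_no_read` recovers an ordinary validator; the sketch below (only the name is new) restates the body of `validate_total_constant_size` shown earlier in this file.

inline_for_extraction
let validate_total_constant_size_alt
  (#k: parser_kind) (#t: Type) (p: parser k t) (sz: U64.t)
  (u: unit {
    k.parser_kind_high == Some k.parser_kind_low /\
    k.parser_kind_low == U64.v sz /\
    k.parser_kind_metadata == Some ParserKindMetadataTotal
  })
: Tot (validator p)
= (* ghost slice + concrete length in, ordinary validator out *)
  validate_no_read (validate_total_constant_size_no_read p sz u)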
Prims.Tot | val validate_with_error_code
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(c: error_code)
: Tot (validator p) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c | val validate_with_error_code
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(c: error_code)
: Tot (validator p)
let validate_with_error_code
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(c: error_code)
: Tot (validator p) = | false | null | false | fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Low.Base.validator",
"LowParse.Low.ErrorCode.error_code",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt64.t",
"LowParse.Low.ErrorCode.maybe_set_error_code"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val validate_with_error_code
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(c: error_code)
: Tot (validator p) | [] | LowParse.Low.Base.validate_with_error_code | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | v: LowParse.Low.Base.validator p -> c: LowParse.Low.ErrorCode.error_code
-> LowParse.Low.Base.validator p | {
"end_col": 32,
"end_line": 236,
"start_col": 2,
"start_line": 234
} |
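The row above documents `validate_with_error_code`, which tags a failing validation result with an application-chosen error code. As a hedged illustration (not part of the dataset), the sketch below composes it with `validate_with_comment` from the same file; the wrapper name `validate_p_tagged` and the comment string are invented for the example and assume the surrounding module context of LowParse.Low.Base.

noextract
inline_for_extraction
let validate_p_tagged
  (#k: parser_kind) (#t: Type) (#p: parser k t)
  (v: validator p) (c: error_code)
: Tot (validator p)
= (* Failures of `v` are tagged with `c`; the extracted code also carries a comment. *)
  validate_with_comment "validating p" (validate_with_error_code v c)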
Prims.Tot | val read_with_comment (s: string) (#k: parser_kind) (#t: Type) (#p: parser k t) (r: leaf_reader p)
: Tot (leaf_reader p) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos | val read_with_comment (s: string) (#k: parser_kind) (#t: Type) (#p: parser k t) (r: leaf_reader p)
: Tot (leaf_reader p)
let read_with_comment (s: string) (#k: parser_kind) (#t: Type) (#p: parser k t) (r: leaf_reader p)
: Tot (leaf_reader p) = | false | null | false | fun #rrel #rel sl pos ->
comment s;
r sl pos | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"Prims.string",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Low.Base.leaf_reader",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt32.t",
"Prims.unit",
"LowParse.Low.Base.comment"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val read_with_comment (s: string) (#k: parser_kind) (#t: Type) (#p: parser k t) (r: leaf_reader p)
: Tot (leaf_reader p) | [] | LowParse.Low.Base.read_with_comment | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | s: Prims.string -> r: LowParse.Low.Base.leaf_reader p -> LowParse.Low.Base.leaf_reader p | {
"end_col": 12,
"end_line": 519,
"start_col": 2,
"start_line": 517
} |
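A hedged usage sketch for the `read_with_comment` row above (not part of the dataset): it wraps an arbitrary leaf reader so that the extracted code emits a comment before the read. The name `read_p_commented` and the comment text are illustrative only and assume the same module context.

noextract
inline_for_extraction
let read_p_commented
  (#k: parser_kind) (#t: Type) (#p: parser k t)
  (r: leaf_reader p)
: Tot (leaf_reader p)
= (* Same contract as `r`; only adds a comment in the extracted code. *)
  read_with_comment "reading p" r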
Prims.GTot | val seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort | val seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 = | false | null | false | Seq.length sshort <= Seq.length slong /\ (Seq.slice slong 0 (Seq.length sshort)) `Seq.equal` sshort | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"sometrivial"
] | [
"FStar.Seq.Base.seq",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Seq.Base.length",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.slice"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 | [] | LowParse.Low.Base.seq_starts_with | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | slong: FStar.Seq.Base.seq t -> sshort: FStar.Seq.Base.seq t -> Prims.GTot Type0 | {
"end_col": 58,
"end_line": 438,
"start_col": 2,
"start_line": 437
} |
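A hedged example for the `seq_starts_with` row above (not from the dataset): it instantiates `seq_starts_with_append_l_intro` from the same file to show that an appended sequence starts with its left operand. `Seq.create` is assumed to be FStar.Seq.Base.create with its usual signature, and the lemma name is invented for illustration.

let seq_starts_with_example (#t: Type) (x y: t) : Lemma
  ((Seq.create 1 x `Seq.append` Seq.create 1 y) `seq_starts_with` Seq.create 1 x)
= (* Directly an instance of seq_starts_with_append_l_intro with s1 = [x], s2 = [y]. *)
  seq_starts_with_append_l_intro (Seq.create 1 x) (Seq.create 1 y)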
Prims.GTot | val writable (#t: Type) (#rrel #rel: _) (b: B.mbuffer t rrel rel) (pos pos': nat) (h: HS.mem)
: GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
))) | val writable (#t: Type) (#rrel #rel: _) (b: B.mbuffer t rrel rel) (pos pos': nat) (h: HS.mem)
: GTot Type0
let writable (#t: Type) (#rrel #rel: _) (b: B.mbuffer t rrel rel) (pos pos': nat) (h: HS.mem)
: GTot Type0 = | false | null | false | let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==>
((forall (s1: Seq.lseq t (pos' - pos)). {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2: Seq.lseq t (pos' - pos)). {:pattern (Seq.replace_subseq s pos pos' s2)}
(Seq.replace_subseq s pos pos' s1) `rel` (Seq.replace_subseq s pos pos' s2)))) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.nat",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"LowStar.Monotonic.Buffer.length",
"Prims.l_Forall",
"FStar.Seq.Properties.lseq",
"Prims.op_Subtraction",
"FStar.Seq.Properties.replace_subseq",
"FStar.Seq.Base.seq",
"LowStar.Monotonic.Buffer.as_seq"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val writable (#t: Type) (#rrel #rel: _) (b: B.mbuffer t rrel rel) (pos pos': nat) (h: HS.mem)
: GTot Type0 | [] | LowParse.Low.Base.writable | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
b: LowStar.Monotonic.Buffer.mbuffer t rrel rel ->
pos: Prims.nat ->
pos': Prims.nat ->
h: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 5,
"end_line": 558,
"start_col": 1,
"start_line": 552
} |
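A hedged sketch for the `writable` row above (not from the dataset): a typical ghost specification that a hypothetical in-place writer into the range [pos, pos + len) might require, namely that the range lies within the buffer and is writable in the given memory. The name `writable_range_spec` is invented for illustration.

let writable_range_spec
  (#t: Type) (#rrel #rel: _) (b: B.mbuffer t rrel rel)
  (pos len: nat) (h: HS.mem)
: GTot Type0
= (* The destination range must fit in `b` and be writable under `rel` in `h`. *)
  pos + len <= B.length b /\ writable b pos (pos + len) h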
Prims.Tot | val validate_with_comment (s: string) (#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p)
: Tot (validator p) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos | val validate_with_comment (s: string) (#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p)
: Tot (validator p)
let validate_with_comment (s: string) (#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p)
: Tot (validator p) = | false | null | false | fun #rrel #rel sl pos ->
comment s;
v sl pos | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"Prims.string",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Low.Base.validator",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt64.t",
"Prims.unit",
"LowParse.Low.Base.comment"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
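(* Usage sketch (hypothetical caller code, assuming some `v : validator p`): the
   result is either a success position or an error code, so callers branch on
   `is_success` before converting back to 32-bit positions, e.g.
     let r = v sl pos64 in
     if is_success r
     then ... (* valid_pos p h sl (uint64_to_uint32 pos64) (uint64_to_uint32 r) *)
     else ... (* p is not valid at pos64; r carries the error code *)
   `is_success` and `uint64_to_uint32` are provided by the ErrorCode module
   included above. *)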
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val validate_with_comment (s: string) (#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p)
: Tot (validator p) | [] | LowParse.Low.Base.validate_with_comment | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | s: Prims.string -> v: LowParse.Low.Base.validator p -> LowParse.Low.Base.validator p | {
"end_col": 10,
"end_line": 228,
"start_col": 2,
"start_line": 226
} |
Prims.Tot | val validate_weaken
(k1 #k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 {k1 `is_weaker_than` k2})
: Tot (validator (weaken k1 p2)) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos | val validate_weaken
(k1 #k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 {k1 `is_weaker_than` k2})
: Tot (validator (weaken k1 p2))
let validate_weaken
(k1 #k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 {k1 `is_weaker_than` k2})
: Tot (validator (weaken k1 p2)) = | false | null | false | fun #rrel #rel sl pos ->
let h = HST.get () in
[@@ inline_let ]let _ = valid_weaken k1 p2 h sl (uint64_to_uint32 pos) in
v2 sl pos | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Low.Base.validator",
"LowParse.Spec.Base.is_weaker_than",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt64.t",
"Prims.unit",
"LowParse.Low.Base.valid_weaken",
"LowParse.Low.ErrorCode.uint64_to_uint32",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowParse.Spec.Base.weaken"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
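(* Hypothetical instantiation (the parser name `p4` is assumed here for
   illustration only): for a total parser whose kind has
   parser_kind_low = parser_kind_high = 4, a validator needs no reads at all:
     let validate_p4 : validator p4 = validate_total_constant_size p4 4uL ()
   The unit argument discharges the three kind equalities required above. *)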
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } ) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val validate_weaken
(k1 #k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 {k1 `is_weaker_than` k2})
: Tot (validator (weaken k1 p2)) | [] | LowParse.Low.Base.validate_weaken | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
k1: LowParse.Spec.Base.parser_kind ->
v2: LowParse.Low.Base.validator p2 {LowParse.Spec.Base.is_weaker_than k1 k2}
-> LowParse.Low.Base.validator (LowParse.Spec.Base.weaken k1 p2) | {
"end_col": 11,
"end_line": 373,
"start_col": 2,
"start_line": 368
} |
FStar.Pervasives.Lemma | val writable_ext (#t: Type) (#rrel #rel: _) (b: B.mbuffer t rrel rel) (pos pos': nat) (h h': HS.mem)
: Lemma
(requires
(writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\
(B.as_seq h' b) `Seq.equal` (B.as_seq h b) /\ B.live h' b))
(ensures (writable b pos pos' h')) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h' | val writable_ext (#t: Type) (#rrel #rel: _) (b: B.mbuffer t rrel rel) (pos pos': nat) (h h': HS.mem)
: Lemma
(requires
(writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\
(B.as_seq h' b) `Seq.equal` (B.as_seq h b) /\ B.live h' b))
(ensures (writable b pos pos' h'))
let writable_ext (#t: Type) (#rrel #rel: _) (b: B.mbuffer t rrel rel) (pos pos': nat) (h h': HS.mem)
: Lemma
(requires
(writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\
(B.as_seq h' b) `Seq.equal` (B.as_seq h b) /\ B.live h' b))
(ensures (writable b pos pos' h')) = | false | null | true | writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h' | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.nat",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Low.Base.writable_replace_subseq",
"FStar.Seq.Base.slice",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.unit",
"Prims.l_and",
"LowParse.Low.Base.writable",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"LowStar.Monotonic.Buffer.length",
"FStar.Seq.Base.equal",
"LowStar.Monotonic.Buffer.live",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
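(* Note: unlike a `validator`, a `jumper` may assume validity (`valid p h sl pos`
   is part of its precondition), so it only recomputes the length of the
   already-validated value and returns `pos + content_length p h sl pos`;
   `jump_constant_size` below, for instance, simply adds the constant size. *)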
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
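(* Worked example: with slong = [1; 2; 3] and sshort = [1; 2] (as sequences),
   `slong `seq_starts_with` sshort` holds, since Seq.slice slong 0 2 is [1; 2];
   it fails for sshort = [2], because the length-1 prefix of slong is [1]. Every
   sequence starts with itself and with the empty sequence. *)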
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
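(* Intuition (informal gloss): `writable b pos pos' h` says the preorder `rel`
   cannot distinguish any two ways of overwriting the sub-range [pos, pos'), so
   arbitrary writes there are permitted. For buffers with a trivial preorder
   (e.g. LowStar.Buffer.buffer, whose preorder relates any two sequences) this
   holds on every live range, whereas an immutable buffer's preorder (contents
   must stay equal) in general rules it out for non-empty ranges. *)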
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h' | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val writable_ext (#t: Type) (#rrel #rel: _) (b: B.mbuffer t rrel rel) (pos pos': nat) (h h': HS.mem)
: Lemma
(requires
(writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\
(B.as_seq h' b) `Seq.equal` (B.as_seq h b) /\ B.live h' b))
(ensures (writable b pos pos' h')) | [] | LowParse.Low.Base.writable_ext | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
b: LowStar.Monotonic.Buffer.mbuffer t rrel rel ->
pos: Prims.nat ->
pos': Prims.nat ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowParse.Low.Base.writable b pos pos' h /\ pos <= pos' /\
pos' <= LowStar.Monotonic.Buffer.length b /\
FStar.Seq.Base.equal (LowStar.Monotonic.Buffer.as_seq h' b)
(LowStar.Monotonic.Buffer.as_seq h b) /\ LowStar.Monotonic.Buffer.live h' b)
(ensures LowParse.Low.Base.writable b pos pos' h') | {
"end_col": 77,
"end_line": 676,
"start_col": 2,
"start_line": 676
} |
FStar.HyperStack.ST.Stack | val print_list
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(print:
(#rrel: _ -> #rel: _ -> sl: slice rrel rel -> pos: U32.t
-> HST.Stack unit
(requires (fun h -> valid p h sl pos))
(ensures (fun h _ h' -> B.modifies B.loc_none h h'))))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos': U32.t)
: HST.Stack unit
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h _ h' -> B.modifies B.loc_none h h')) | [
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "BF"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let print_list
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(print: ((#rrel: _) -> (#rel: _) -> (sl: slice rrel rel) -> (pos: U32.t) -> HST.Stack unit (requires (fun h -> valid p h sl pos)) (ensures (fun h _ h' -> B.modifies B.loc_none h h'))))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack unit
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h _ h' ->
B.modifies B.loc_none h h'
))
= let h0 = HST.get () in
list_fold_left
p
j
sl
pos pos'
h0
(Ghost.hide B.loc_none)
(fun _ _ _ _ -> True)
(fun _ _ _ _ _ -> ())
(fun pos1 _ _ _ _ ->
print sl pos1
) | val print_list
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(print:
(#rrel: _ -> #rel: _ -> sl: slice rrel rel -> pos: U32.t
-> HST.Stack unit
(requires (fun h -> valid p h sl pos))
(ensures (fun h _ h' -> B.modifies B.loc_none h h'))))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos': U32.t)
: HST.Stack unit
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h _ h' -> B.modifies B.loc_none h h'))
let print_list
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(print:
(#rrel: _ -> #rel: _ -> sl: slice rrel rel -> pos: U32.t
-> HST.Stack unit
(requires (fun h -> valid p h sl pos))
(ensures (fun h _ h' -> B.modifies B.loc_none h h'))))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos': U32.t)
: HST.Stack unit
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h _ h' -> B.modifies B.loc_none h h')) = | true | null | false | let h0 = HST.get () in
list_fold_left p j sl pos pos' h0 (Ghost.hide B.loc_none) (fun _ _ _ _ -> True)
(fun _ _ _ _ _ -> ()) (fun pos1 _ _ _ _ -> print sl pos1) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Low.Base.jumper",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt32.t",
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Low.Base.Spec.valid",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"LowParse.Low.Base.list_fold_left",
"FStar.Ghost.hide",
"LowStar.Monotonic.Buffer.loc",
"Prims.list",
"Prims.l_True",
"FStar.Ghost.erased",
"FStar.HyperStack.ST.get",
"LowParse.Low.Base.Spec.valid_list"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
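(* writable b pos pos' h: any two replacements of the bytes of b between pos
   and pos' are related by b's preorder rel, so that range can be overwritten
   freely. This is the precondition demanded by the writers below. *)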
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
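(* Leaf writers: the weak variant checks the remaining space itself and returns
   max_uint32 when the value does not fit, while the strong variant assumes the
   caller has already ensured that serialized_length s x bytes are available. *)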
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
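(* frame_serializer32 runs a serializer32 on [pos, pos+len) while recording that
   the surrounding frame [posl, pos) and [pos+len, posr) is left unchanged and
   that the whole [posl, posr) range stays writable. *)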
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h'
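(* Copying valid data between slices: copy_strong assumes the destination has
   enough room (the caller knows both source positions), whereas copy_weak
   performs the bounds check itself and returns max_uint32 on overflow. *)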
#push-options "--z3rlimit 16"
inline_for_extraction
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos
))
= let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h0 (Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@inline_let] let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos'
#pop-options
inline_for_extraction
let copy_strong'
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\ (
let clen = content_length p h src spos in
U32.v dpos + clen <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + clen) h /\
B.loc_disjoint (loc_slice_from src spos) (loc_slice_from_to dst dpos (dpos `U32.add` (U32.uint_to_t clen)))
)))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
))
= let spos' = j src spos in
copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= if (dst.len `U32.sub` dpos) `U32.lt` (spos' `U32.sub` spos)
then max_uint32
else copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(jmp: jumper p)
(src: slice rrel1 rel1)
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (content_length p h src spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos (get_valid_pos p h src spos)) (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= let spos' = jmp src spos in
copy_weak_with_length p src spos spos' dst dpos
(* fold_left on lists *)
module BF = LowStar.Buffer
#push-options "--z3rlimit 256 --fuel 1 --ifuel 1"
#restart-solver
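(* list_fold_left_gen folds a stateful body over a valid list of parsed
   elements between pos and pos'; the body may stop the iteration early by
   returning false, in which case post_interrupt holds instead of the final
   invariant. *)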
inline_for_extraction
let list_fold_left_gen
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(post_interrupt: ((h: HS.mem) -> GTot Type0))
(post_interrupt_frame: (h: HS.mem) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
post_interrupt h
)) (ensures (post_interrupt h')))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
HST.Stack bool
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos1 /\
valid_pos p h0 sl pos1 pos2 /\
valid_list p h0 sl pos2 pos' /\
inv h (contents_list p h0 sl pos pos1) (contents p h0 sl pos1 :: contents_list p h0 sl pos2 pos') pos1
))
(ensures (fun h ctinue h' ->
B.modifies (Ghost.reveal l) h h' /\
(if ctinue then inv h' (contents_list p h0 sl pos pos1 `L.append` [contents p h0 sl pos1]) (contents_list p h0 sl pos2 pos') pos2 else post_interrupt h')
))
))
: HST.Stack bool
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h res h' ->
B.modifies (Ghost.reveal l) h h' /\
(if res then inv h' (contents_list p h sl pos pos') [] pos' else post_interrupt h')
))
= HST.push_frame ();
let h1 = HST.get () in
// B.fresh_frame_modifies h0 h1;
let bpos : BF.pointer U32.t = BF.alloca pos 1ul in
let bctinue : BF.pointer bool = BF.alloca true 1ul in
let btest: BF.pointer bool = BF.alloca (pos `U32.lt` pos') 1ul in
let h2 = HST.get () in
assert (B.modifies B.loc_none h0 h2);
let test_pre (h: HS.mem) : GTot Type0 =
B.live h bpos /\ B.live h bctinue /\ B.live h btest /\ (
let pos1 = Seq.index (B.as_seq h bpos) 0 in
let ctinue = Seq.index (B.as_seq h bctinue) 0 in
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
B.modifies (Ghost.reveal l `B.loc_union` B.loc_region_only true (HS.get_tip h1)) h2 h /\
Seq.index (B.as_seq h btest) 0 == ((U32.v (Seq.index (B.as_seq h bpos) 0) < U32.v pos') && Seq.index (B.as_seq h bctinue) 0) /\
(if ctinue then inv h (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 else post_interrupt h)
)
in
let test_post (cond: bool) (h: HS.mem) : GTot Type0 =
test_pre h /\
cond == Seq.index (B.as_seq h btest) 0
in
valid_list_nil p h0 sl pos;
inv_frame h0 [] (contents_list p h0 sl pos pos') pos h1;
inv_frame h1 [] (contents_list p h0 sl pos pos') pos h2;
[@inline_let]
let while_body () : HST.Stack unit
(requires (fun h -> test_post true h))
(ensures (fun _ _ h1 -> test_pre h1))
=
let h51 = HST.get () in
let pos1 = B.index bpos 0ul in
valid_list_cons_recip p h0 sl pos1 pos';
//assert (B.modifies (Ghost.reveal l `B.loc_union` B.loc_buffer bpos) h0 h51);
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos'));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos pos1));
valid_list_cons_recip p h51 sl pos1 pos';
let pos2 = j sl pos1 in
let h52 = HST.get () in
inv_frame h51 (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 h52;
B.modifies_only_not_unused_in (Ghost.reveal l) h0 h52;
let ctinue = body pos1 pos2 in
let h53 = HST.get () in
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos2 pos'));
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h1));
valid_pos_frame_strong p h0 sl pos1 pos2 (Ghost.reveal l) h53;
valid_list_snoc p h0 sl pos pos1;
B.upd bpos 0ul pos2;
B.upd bctinue 0ul ctinue;
B.upd btest 0ul ((pos2 `U32.lt` pos') && ctinue);
let h54 = HST.get () in
[@inline_let]
let _ =
if ctinue
then inv_frame h53 (contents_list p h0 sl pos pos2) (contents_list p h0 sl pos2 pos') pos2 h54
else post_interrupt_frame h53 h54
in
()
in
C.Loops.while
#test_pre
#test_post
(fun (_: unit) -> (
B.index btest 0ul) <: HST.Stack bool (requires (fun h -> test_pre h)) (ensures (fun h x h1 -> test_post x h1)))
while_body
;
valid_list_nil p h0 sl pos';
let res = B.index bctinue 0ul in
let h3 = HST.get () in
HST.pop_frame ();
let h4 = HST.get () in
//B.popped_modifies h3 h4;
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h3));
[@inline_let]
let _ =
if res
then inv_frame h3 (contents_list p h0 sl pos pos') [] pos' h4
else post_interrupt_frame h3 h4
in
res
#pop-options
//B.loc_includes_union_l (B.loc_all_regions_from false (HS.get_tip h1)) (Ghost.reveal l) (Ghost.reveal l)
//B.modifies_fresh_frame_popped h0 h1 (Ghost.reveal l) h3 h4
module G = FStar.Ghost
inline_for_extraction
let list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
(l1: Ghost.erased (list t)) ->
(x: Ghost.erased t) ->
(l2: Ghost.erased (list t)) ->
HST.Stack unit
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\
U32.v pos <= U32.v pos1 /\ U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' == Ghost.reveal l1 `L.append` (Ghost.reveal x :: Ghost.reveal l2)
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (Ghost.reveal l1 `L.append` [contents p h0 sl pos1]) (Ghost.reveal l2) pos2
))
))
: HST.Stack unit
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (contents_list p h sl pos pos') [] pos'
))
= let _ = list_fold_left_gen
p
j
sl
pos pos'
h0
l
inv
inv_frame
(fun _ -> False)
(fun _ _ -> ())
(fun pos1 pos2 ->
let h = HST.get () in
valid_list_cons p h sl pos1 pos';
valid_list_append p h sl pos pos1 pos';
body
pos1
pos2
(Ghost.hide (contents_list p h sl pos pos1))
(Ghost.hide (contents p h sl pos1))
(Ghost.hide (contents_list p h sl pos2 pos'))
;
true
)
in
()
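(* list_length counts the elements of a valid list by folding a stack-allocated
   counter over it. *)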
inline_for_extraction
let list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v res == L.length (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
B.fresh_frame_modifies h0 h1;
let blen : BF.pointer U32.t = BF.alloca 0ul 1ul in
let h2 = HST.get () in
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer blen))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_buffer blen) h2 h /\
B.live h blen /\ (
let len = U32.v (Seq.index (B.as_seq h blen) 0) in
len <= U32.v pos1 /\ // necessary to prove that length computations do not overflow
len == L.length l1
))
(fun h l1 l2 pos1 h' ->
B.modifies_only_not_unused_in (B.loc_buffer blen) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
B.upd blen 0ul (B.index blen 0ul `U32.add` 1ul);
Classical.forall_intro_2 (list_length_append #t)
)
;
let len = B.index blen 0ul in
HST.pop_frame ();
len
#push-options "--z3rlimit 32 --fuel 2 --ifuel 1"
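(* list_filter copies, in order, the elements satisfying f from the input list
   into the output slice and returns the output end position. *)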
inline_for_extraction
let list_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(f: (t -> Tot bool))
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
(x: Ghost.erased t) ->
HST.Stack bool
(requires (fun h -> valid_content p h sl pos (G.reveal x)))
(ensures (fun h res h' -> B.modifies B.loc_none h h' /\ res == f (G.reveal x)))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
(#rrel_out #rel_out: _)
(sl_out : slice rrel_out rel_out)
(pos_out : U32.t)
: HST.Stack U32.t
(requires (fun h ->
U32.v pos_out + U32.v pos' - U32.v pos <= U32.v sl_out.len /\
valid_list p h sl pos pos' /\
B.loc_disjoint (loc_slice_from_to sl pos pos') (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
live_slice h sl_out
))
(ensures (fun h pos_out' h' ->
B.modifies (loc_slice_from_to sl_out pos_out pos_out') h h' /\
U32.v pos_out' - U32.v pos_out <= U32.v pos' - U32.v pos /\
valid_list p h' sl_out pos_out pos_out' /\
contents_list p h' sl_out pos_out pos_out' == L.filter f (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
//B.fresh_frame_modifies h0 h1;
let bpos_out' : BF.pointer U32.t = BF.alloca pos_out 1ul in
let h2 = HST.get () in
let inv (h: HS.mem) (l1 l2: list t) (pos1: U32.t) : GTot Type0 =
B.live h bpos_out' /\ (
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
valid_list p h sl_out pos_out pos_out' /\
contents_list p h sl_out pos_out pos_out' == L.filter f l1 /\
U32.v pos_out' - U32.v pos1 <= U32.v pos_out - U32.v pos // necessary to prove that length computations do not overflow
)
in
valid_list_nil p h2 sl_out pos_out;
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))))
inv
(fun h l1 l2 pos1 h' ->
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies_only_not_unused_in (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
let pos_out1 = B.index bpos_out' 0ul in
list_filter_append f (G.reveal l1) [G.reveal x];
if f' sl pos1 x
then begin
assert (B.loc_includes (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) (loc_slice_from_to sl_out pos_out1 (pos_out1 `U32.add` (pos2 `U32.sub` pos1))));
let h = HST.get () in
writable_weaken sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (U32.v pos_out1) (U32.v pos_out1 + (U32.v pos2 - U32.v pos1));
let pos_out2 = copy_strong p sl pos1 pos2 sl_out pos_out1 in
B.upd bpos_out' 0ul pos_out2;
let h' = HST.get () in
writable_modifies sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (B.loc_region_only true (HS.get_tip h1)) h';
valid_list_nil p h' sl_out pos_out2;
valid_list_cons p h' sl_out pos_out1 pos_out2;
valid_list_append p h' sl_out pos_out pos_out1 pos_out2
end else
L.append_l_nil (L.filter f (G.reveal l1))
)
;
let pos_out' = B.index bpos_out' 0ul in
HST.pop_frame ();
pos_out'
#pop-options
#push-options "--z3rlimit 64 --fuel 2 --ifuel 1"
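(* list_nth returns the slice position of the i-th element of a valid list,
   stopping the underlying fold early once that element is reached. *)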
inline_for_extraction
let list_nth
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(i: U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos' /\
U32.v i < L.length (contents_list p h sl pos pos')
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
valid_list p h sl pos res /\
valid p h sl res /\
valid_list p h sl (get_valid_pos p h sl res) pos' /\
L.length (contents_list p h sl pos res) == U32.v i /\
contents p h sl res == L.index (contents_list p h sl pos pos') (U32.v i)
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos1 = BF.alloca pos 1ul in
let bk = BF.alloca 0ul 1ul in
let h2 = HST.get () in
valid_list_nil p h0 sl pos;
let _ : bool = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
let k = Seq.index (B.as_seq h bk) 0 in
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bpos1 /\
B.live h bk /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v k /\
U32.v k <= U32.v i
)
(fun h _ _ _ h' ->
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bpos1);
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bk);
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
let pos1 = Seq.index (B.as_seq h bpos1) 0 in
B.live h bpos1 /\
valid p h0 sl pos1 /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl (get_valid_pos p h0 sl pos1) pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v i /\
contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i)
)
(fun _ _ ->
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 ->
let k = B.index bk 0ul in
if k = i
then begin
B.upd bpos1 0ul pos1;
valid_list_cons_recip p h0 sl pos1 pos';
list_index_append (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') (U32.v i);
valid_list_append p h0 sl pos pos1 pos' ;
assert (contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i));
false
end else begin
B.upd bk 0ul (k `U32.add` 1ul);
let h = HST.get () in
B.modifies_only_not_unused_in B.loc_none h0 h;
valid_list_snoc p h0 sl pos pos1;
assert (valid p h0 sl pos1);
assert (pos2 == get_valid_pos p h0 sl pos1);
assert (valid_list p h0 sl pos pos2);
list_length_append (contents_list p h0 sl pos pos1) [contents p h0 sl pos1];
true
end
)
in
let res = B.index bpos1 0ul in
HST.pop_frame ();
res
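(* list_find returns the position of the first element satisfying f, or pos'
   if no element does; f' is the executable test on slice contents. *)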
inline_for_extraction
let list_find
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let l = contents_list p h sl pos pos' in
if res = pos'
then L.find f l == None
else
U32.v pos <= U32.v res /\
valid p h sl res /\ (
let x = contents p h sl res in
U32.v res + content_length p h sl res <= U32.v pos' /\
f x == true /\
L.find f l == Some x
)
)))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bres = BF.alloca 0ul 1ul in
let h2 = HST.get () in
let not_found = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\
valid_list p h0 sl pos1 pos' /\
l2 == contents_list p h0 sl pos1 pos' /\
L.find f (contents_list p h0 sl pos pos') == L.find f l2
)
(fun h _ _ _ h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\ (
let res = Seq.index (B.as_seq h bres) 0 in
U32.v pos <= U32.v res /\
valid p h0 sl res /\ (
let x = contents p h0 sl res in
U32.v res + content_length p h0 sl res <= U32.v pos' /\
f x == true /\
L.find f (contents_list p h0 sl pos pos') == Some x
)))
(fun h h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun pos1 pos2 ->
if f' sl pos1
then begin
B.upd bres 0ul pos1;
false
end
else true
)
in
let res =
if not_found
then pos'
else B.index bres 0ul
in
HST.pop_frame ();
res
#pop-options
let rec list_existsb_find
(#a: Type)
(f: (a -> Tot bool))
(l: list a)
: Lemma
(L.existsb f l == Some? (L.find f l))
= match l with
| [] -> ()
| x :: q ->
if f x
then ()
else list_existsb_find f q
inline_for_extraction
noextract
let list_existsb
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack bool
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == L.existsb f (contents_list p h sl pos pos')
))
= let h = HST.get () in
list_existsb_find f (contents_list p h sl pos pos');
let posn = list_find j f f' sl pos pos' in
posn <> pos'
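(* list_flatten_map writes, for each parsed element x of the input list, the
   serialization of the list f x into the output slice, yielding the flattened
   concatenation; it returns max_uint32 if the result does not fit. *)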
#push-options "--fuel 2 --ifuel 1 --z3rlimit 256 --query_stats"
inline_for_extraction
noextract
let list_flatten_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot (list t2))) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2_ res /\
contents_list p2 h' sl2 pos2_ res == y
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1')) in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= let hz = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos2_ = BF.alloca pos2 1ul in
let h2 = HST.get () in
valid_list_nil p2 hz sl2 pos2;
let fits = list_fold_left_gen
p1
j1
sl1
pos1 pos1'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2))
(fun h ll lr _ ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
B.live h bpos2_ /\ (
let pos2_ = Seq.index (B.as_seq h bpos2_) 0 in
contents_list p1 h0 sl1 pos1 pos1' == ll `List.Tot.append` lr /\
valid_list p2 h sl2 pos2 pos2_ /\
contents_list p2 h sl2 pos2 pos2_ == List.Tot.flatten (List.Tot.map f ll) /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(fun h _ _ _ h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
U32.v pos2 + serialized_list_length s2 (List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1'))) > U32.v sl2.len
)
(fun h h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1l pos1r ->
let pos2_ = B.index bpos2_ 0ul in
let h = HST.get () in
writable_weaken sl2.base (U32.v pos2) (U32.v sl2.len) h (U32.v pos2_) (U32.v sl2.len);
valid_pos_frame_strong p1 h0 sl1 pos1l pos1r (loc_slice_from sl2 pos2) hz;
let res = f' pos1l pos2_ in
let fits = not (res = max_uint32) in
if fits then begin
B.upd bpos2_ 0ul res;
let h' = HST.get () in
writable_modifies sl2.base (U32.v pos2) (U32.v sl2.len) h (B.loc_region_only true (HS.get_tip h1)) h' ;
List.Tot.append_assoc (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l] (contents_list p1 h0 sl1 pos1r pos1');
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l];
valid_list_snoc p1 h0 sl1 pos1 pos1l;
valid_list_append p2 h' sl2 pos2 pos2_ res;
valid_list_nil p2 h' sl2 res;
valid_list_append p2 h' sl2 pos2_ res res
end else begin
let h' = HST.get () in
valid_list_cons p1 h0 sl1 pos1l pos1' ;
valid_list_append p1 h0 sl1 pos1 pos1l pos1' ;
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) (contents_list p1 h0 sl1 pos1l pos1');
serialized_list_length_append s2 (L.flatten (L.map f (contents_list p1 h0 sl1 pos1 pos1l))) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1l pos1')));
serialized_list_length_append s2 (f (contents p1 h0 sl1 pos1l)) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1r pos1')));
valid_list_serialized_list_length s2 h' sl2 pos2 pos2_
end;
fits
)
in
let res =
if fits
then B.index bpos2_ 0ul
else max_uint32
in
HST.pop_frame ();
res
#pop-options
#push-options "--z3rlimit 16 --query_stats"
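(* list_map is the one-element-per-element special case of list_flatten_map,
   obtained by mapping each x to the singleton list [f x]. *)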
inline_for_extraction
noextract
let list_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot t2)) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_length s2 y > U32.v sl2.len
else
valid_content_pos p2 h' sl2 pos2_ y res
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1') in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= list_map_list_flatten_map f (contents_list p1 h0 sl1 pos1 pos1');
list_flatten_map
j1
s2
(fun x -> [f x])
h0
sl1 pos1 pos1'
sl2 pos2
(fun pos1 pos2 ->
let res = f' pos1 pos2 in
let h = HST.get () in
if res = max_uint32
then begin
serialized_list_length_nil s2;
serialized_list_length_cons s2 (f (contents p1 h0 sl1 pos1)) []
end
else begin
valid_list_nil p2 h sl2 res;
valid_list_cons p2 h sl2 pos2 res
end;
res
)
(* Example: trivial printers *)
inline_for_extraction
let print_list
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(print: ((#rrel: _) -> (#rel: _) -> (sl: slice rrel rel) -> (pos: U32.t) -> HST.Stack unit (requires (fun h -> valid p h sl pos)) (ensures (fun h _ h' -> B.modifies B.loc_none h h'))))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack unit
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h _ h' ->
B.modifies B.loc_none h h' | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val print_list
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(print:
(#rrel: _ -> #rel: _ -> sl: slice rrel rel -> pos: U32.t
-> HST.Stack unit
(requires (fun h -> valid p h sl pos))
(ensures (fun h _ h' -> B.modifies B.loc_none h h'))))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos': U32.t)
: HST.Stack unit
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h _ h' -> B.modifies B.loc_none h h')) | [] | LowParse.Low.Base.print_list | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
j: LowParse.Low.Base.jumper p ->
print:
(sl: LowParse.Slice.slice rrel rel -> pos: FStar.UInt32.t
-> FStar.HyperStack.ST.Stack Prims.unit) ->
sl: LowParse.Slice.slice rrel rel ->
pos: FStar.UInt32.t ->
pos': FStar.UInt32.t
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 5,
"end_line": 2002,
"start_col": 1,
"start_line": 1990
} |
Prims.Tot | val leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos' | val leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s) = | false | null | false | fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@@ inline_let ]let pos' = pos `U32.add` len in
let h = HST.get () in
[@@ inline_let ]let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos' | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"LowParse.Low.Base.serializer32",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt32.t",
"Prims.unit",
"LowParse.Low.Base.Spec.valid_facts",
"LowParse.Spec.Base.parse_strong_prefix",
"LowParse.Bytes.bytes",
"LowParse.Low.Base.Spec.bytes_of_slice_from_to",
"LowParse.Slice.bytes_of_slice_from",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"FStar.UInt32.add",
"LowParse.Slice.buffer_srel_of_srel",
"LowParse.Slice.__proj__Mkslice__item__base",
"LowParse.Low.Base.Spec.serialized_length_eq",
"LowParse.Low.Base.leaf_writer_strong"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
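(* A [jumper] is cheaper than a validator: it assumes the value at [pos] is already
   known to be valid and merely computes the position one past its last byte,
   without modifying memory. *)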
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
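(* [jump_serializer] jumps over a value [x] whose serialization is known (ghostly)
   to be a prefix of the bytes at [pos]: that prefix is first shown to be a valid
   parse of [p], and then the jumper computes its end position. *)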
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
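(* [writable b pos pos' h] states that, in heap [h], the preorder [rel] of [b]
   accepts replacing the subsequence [pos, pos') by any sequence of the same
   length, i.e. these positions can be overwritten freely. *)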
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
  B.g_upd_seq_as_seq b s' h; // establishes that b is still live in h' (and that its contents are s')
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
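(* A single-cell update within a writable range: the modified footprint is limited
   to the one cell at index [i], and writability of the enclosing [pos, pos')
   range is preserved in the resulting heap. *)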
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
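(* A weak leaf writer may run out of space and then returns [max_uint32] as a
   failure marker (hence the [sl.len < max_uint32] precondition), whereas a strong
   leaf writer requires enough writable space up front, always succeeds, and
   modifies only [pos, pos'). *)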
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
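(* A [serializer32] writes the serialization of [x] into a raw byte buffer at
   [pos] (the target range must be writable) and returns the number of bytes
   written; only the range [pos, pos + len) is modified. *)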
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
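(* Framing wrapper around a [serializer32]: besides writing [x] at [pos], it
   proves that the bytes of [posl, pos) and [pos + len, posr) are unchanged and
   that writability of the enclosing [posl, posr) range is preserved. *)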
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong)) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s) | [] | LowParse.Low.Base.leaf_writer_strong_of_serializer32 | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
s32: LowParse.Low.Base.serializer32 s ->
u299:
Prims.squash (Mkparser_kind'?.parser_kind_subkind k ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong)
-> LowParse.Low.Base.leaf_writer_strong s | {
"end_col": 6,
"end_line": 931,
"start_col": 2,
"start_line": 918
} |
FStar.Pervasives.Lemma | val writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(sl': Seq.seq t)
(h: HS.mem)
: Lemma
(requires
(writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos)
)
(ensures
(let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\ writable b pos pos' (B.g_upd_seq b s' h))) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h' | val writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(sl': Seq.seq t)
(h: HS.mem)
: Lemma
(requires
(writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos)
)
(ensures
(let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\ writable b pos pos' (B.g_upd_seq b s' h)))
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(sl': Seq.seq t)
(h: HS.mem)
: Lemma
(requires
(writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos)
)
(ensures
(let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\ writable b pos pos' (B.g_upd_seq b s' h))) = | false | null | true | let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h;
writable_replace_subseq b pos pos' h sl' h' | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.nat",
"FStar.Seq.Base.seq",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Low.Base.writable_replace_subseq",
"Prims.unit",
"LowStar.Monotonic.Buffer.g_upd_seq_as_seq",
"LowStar.Monotonic.Buffer.g_upd_seq",
"FStar.Seq.Properties.replace_subseq",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.l_and",
"LowParse.Low.Base.writable",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"LowStar.Monotonic.Buffer.length",
"Prims.eq2",
"Prims.int",
"FStar.Seq.Base.length",
"Prims.op_Subtraction",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(sl': Seq.seq t)
(h: HS.mem)
: Lemma
(requires
(writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos)
)
(ensures
(let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\ writable b pos pos' (B.g_upd_seq b s' h))) | [] | LowParse.Low.Base.writable_upd_seq | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
b: LowStar.Monotonic.Buffer.mbuffer t rrel rel ->
pos: Prims.nat ->
pos': Prims.nat ->
sl': FStar.Seq.Base.seq t ->
h: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowParse.Low.Base.writable b pos pos' h /\ pos <= pos' /\
pos' <= LowStar.Monotonic.Buffer.length b /\ FStar.Seq.Base.length sl' == pos' - pos)
(ensures
(let s = LowStar.Monotonic.Buffer.as_seq h b in
let s' = FStar.Seq.Properties.replace_subseq s pos pos' sl' in
rel s s' /\
LowParse.Low.Base.writable b pos pos' (LowStar.Monotonic.Buffer.g_upd_seq b s' h))) | {
"end_col": 45,
"end_line": 697,
"start_col": 1,
"start_line": 693
} |
FStar.Pervasives.Lemma | val list_existsb_find (#a: Type) (f: (a -> Tot bool)) (l: list a)
: Lemma (L.existsb f l == Some? (L.find f l)) | [
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "BF"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec list_existsb_find
(#a: Type)
(f: (a -> Tot bool))
(l: list a)
: Lemma
(L.existsb f l == Some? (L.find f l))
= match l with
| [] -> ()
| x :: q ->
if f x
then ()
else list_existsb_find f q | val list_existsb_find (#a: Type) (f: (a -> Tot bool)) (l: list a)
: Lemma (L.existsb f l == Some? (L.find f l))
let rec list_existsb_find (#a: Type) (f: (a -> Tot bool)) (l: list a)
: Lemma (L.existsb f l == Some? (L.find f l)) = | false | null | true | match l with
| [] -> ()
| x :: q -> if f x then () else list_existsb_find f q | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"lemma"
] | [
"Prims.bool",
"Prims.list",
"LowParse.Low.Base.list_existsb_find",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.List.Tot.Base.existsb",
"FStar.Pervasives.Native.uu___is_Some",
"Prims.b2t",
"FStar.List.Tot.Base.find",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
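(* [writable b pos pos' h] means that, in state [h], the range [pos, pos')
   of [b] may be overwritten with arbitrary contents compatibly with the
   buffer's preorder [rel]: any two replacements of that subsequence yield
   [rel]-related sequences. The lemmas below (intro, weaken,
   replace_subseq, ext, upd_seq, upd, modifies) are the toolkit used by the
   writers further down to carry writability across stateful updates. *)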
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
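(* [mbuffer_upd] performs a single in-place update while preserving the
   [writable] witness on the enclosing (ghost) interval [pos, pos'), so a
   writer can chain several updates without re-proving writability. *)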
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
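(* A "weak" leaf writer performs its own bounds check: it returns the final
   position on success, or [max_uint32] if [serialized_length s x] bytes do
   not fit before [sl.len] (hence the requirement that the slice be
   strictly shorter than [max_uint32]). *)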
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
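(* A "strong" leaf writer instead requires the caller to have established
   beforehand that [serialized_length s x] bytes are available and writable
   at [pos]; it cannot fail and only touches the bytes it writes. *)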
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
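(* [serializer32] is the buffer-level analogue of [leaf_writer_strong]: it
   writes into a raw [B.mbuffer byte] instead of a slice and returns the
   number of bytes written rather than the final position. *)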
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
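(* [frame_serializer32] runs a [serializer32] within a larger writable
   interval [posl, posr): besides the bytes actually written at
   [pos, pos + len), it guarantees that the bytes on both sides of that
   range are unchanged and that the whole interval stays writable. *)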
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h'
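(* [blit_strong] wraps [B.blit] with a more precise modifies clause: only
   the destination range [idx_dst, idx_dst + len) is reported as modified,
   which lets the copy functions below frame the rest of the destination. *)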
#push-options "--z3rlimit 16"
inline_for_extraction
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos
))
= let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h0 (Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@inline_let] let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos'
#pop-options
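(* [copy_strong] copies one valid representation from [src] to [dst]: the
   bytes at [spos, spos') parse to [contents p h src spos], and since a
   strong parser's result depends only on the bytes it consumes
   ([parse_strong_prefix]), the copied bytes are valid at [dpos] and parse
   to the same value. *)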
inline_for_extraction
let copy_strong'
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\ (
let clen = content_length p h src spos in
U32.v dpos + clen <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + clen) h /\
B.loc_disjoint (loc_slice_from src spos) (loc_slice_from_to dst dpos (dpos `U32.add` (U32.uint_to_t clen)))
)))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
))
= let spos' = j src spos in
copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= if (dst.len `U32.sub` dpos) `U32.lt` (spos' `U32.sub` spos)
then max_uint32
else copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(jmp: jumper p)
(src: slice rrel1 rel1)
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (content_length p h src spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos (get_valid_pos p h src spos)) (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= let spos' = jmp src spos in
copy_weak_with_length p src spos spos' dst dpos
(* fold_left-style iterators over lists of representations stored in a slice *)
module BF = LowStar.Buffer
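(* [list_fold_left_gen] is the generic iterator over a list of consecutive
   representations laid out in [sl] between [pos] and [pos']. It maintains
   a caller-supplied invariant [inv] over the prefix already visited, the
   remaining suffix and the current position; its loop state (current
   position, continue flag, loop test) lives in a fresh stack frame; and
   the body may interrupt the traversal early by returning [false], in
   which case [post_interrupt] holds instead of the final invariant.
   [list_fold_left], [list_length], [list_filter], [list_nth] and
   [list_find] below are all built on this combinator, directly or via
   [list_fold_left]. *)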
#push-options "--z3rlimit 256 --fuel 1 --ifuel 1"
#restart-solver
inline_for_extraction
let list_fold_left_gen
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(post_interrupt: ((h: HS.mem) -> GTot Type0))
(post_interrupt_frame: (h: HS.mem) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
post_interrupt h
)) (ensures (post_interrupt h')))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
HST.Stack bool
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos1 /\
valid_pos p h0 sl pos1 pos2 /\
valid_list p h0 sl pos2 pos' /\
inv h (contents_list p h0 sl pos pos1) (contents p h0 sl pos1 :: contents_list p h0 sl pos2 pos') pos1
))
(ensures (fun h ctinue h' ->
B.modifies (Ghost.reveal l) h h' /\
(if ctinue then inv h' (contents_list p h0 sl pos pos1 `L.append` [contents p h0 sl pos1]) (contents_list p h0 sl pos2 pos') pos2 else post_interrupt h')
))
))
: HST.Stack bool
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h res h' ->
B.modifies (Ghost.reveal l) h h' /\
(if res then inv h' (contents_list p h sl pos pos') [] pos' else post_interrupt h')
))
= HST.push_frame ();
let h1 = HST.get () in
// B.fresh_frame_modifies h0 h1;
let bpos : BF.pointer U32.t = BF.alloca pos 1ul in
let bctinue : BF.pointer bool = BF.alloca true 1ul in
let btest: BF.pointer bool = BF.alloca (pos `U32.lt` pos') 1ul in
let h2 = HST.get () in
assert (B.modifies B.loc_none h0 h2);
let test_pre (h: HS.mem) : GTot Type0 =
B.live h bpos /\ B.live h bctinue /\ B.live h btest /\ (
let pos1 = Seq.index (B.as_seq h bpos) 0 in
let ctinue = Seq.index (B.as_seq h bctinue) 0 in
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
B.modifies (Ghost.reveal l `B.loc_union` B.loc_region_only true (HS.get_tip h1)) h2 h /\
Seq.index (B.as_seq h btest) 0 == ((U32.v (Seq.index (B.as_seq h bpos) 0) < U32.v pos') && Seq.index (B.as_seq h bctinue) 0) /\
(if ctinue then inv h (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 else post_interrupt h)
)
in
let test_post (cond: bool) (h: HS.mem) : GTot Type0 =
test_pre h /\
cond == Seq.index (B.as_seq h btest) 0
in
valid_list_nil p h0 sl pos;
inv_frame h0 [] (contents_list p h0 sl pos pos') pos h1;
inv_frame h1 [] (contents_list p h0 sl pos pos') pos h2;
[@inline_let]
let while_body () : HST.Stack unit
(requires (fun h -> test_post true h))
(ensures (fun _ _ h1 -> test_pre h1))
=
let h51 = HST.get () in
let pos1 = B.index bpos 0ul in
valid_list_cons_recip p h0 sl pos1 pos';
//assert (B.modifies (Ghost.reveal l `B.loc_union` B.loc_buffer bpos) h0 h51);
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos'));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos pos1));
valid_list_cons_recip p h51 sl pos1 pos';
let pos2 = j sl pos1 in
let h52 = HST.get () in
inv_frame h51 (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 h52;
B.modifies_only_not_unused_in (Ghost.reveal l) h0 h52;
let ctinue = body pos1 pos2 in
let h53 = HST.get () in
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos2 pos'));
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h1));
valid_pos_frame_strong p h0 sl pos1 pos2 (Ghost.reveal l) h53;
valid_list_snoc p h0 sl pos pos1;
B.upd bpos 0ul pos2;
B.upd bctinue 0ul ctinue;
B.upd btest 0ul ((pos2 `U32.lt` pos') && ctinue);
let h54 = HST.get () in
[@inline_let]
let _ =
if ctinue
then inv_frame h53 (contents_list p h0 sl pos pos2) (contents_list p h0 sl pos2 pos') pos2 h54
else post_interrupt_frame h53 h54
in
()
in
C.Loops.while
#test_pre
#test_post
(fun (_: unit) -> (
B.index btest 0ul) <: HST.Stack bool (requires (fun h -> test_pre h)) (ensures (fun h x h1 -> test_post x h1)))
while_body
;
valid_list_nil p h0 sl pos';
let res = B.index bctinue 0ul in
let h3 = HST.get () in
HST.pop_frame ();
let h4 = HST.get () in
//B.popped_modifies h3 h4;
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h3));
[@inline_let]
let _ =
if res
then inv_frame h3 (contents_list p h0 sl pos pos') [] pos' h4
else post_interrupt_frame h3 h4
in
res
#pop-options
//B.loc_includes_union_l (B.loc_all_regions_from false (HS.get_tip h1)) (Ghost.reveal l) (Ghost.reveal l)
//B.modifies_fresh_frame_popped h0 h1 (Ghost.reveal l) h3 h4
module G = FStar.Ghost
inline_for_extraction
let list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
(l1: Ghost.erased (list t)) ->
(x: Ghost.erased t) ->
(l2: Ghost.erased (list t)) ->
HST.Stack unit
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\
U32.v pos <= U32.v pos1 /\ U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' == Ghost.reveal l1 `L.append` (Ghost.reveal x :: Ghost.reveal l2)
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (Ghost.reveal l1 `L.append` [contents p h0 sl pos1]) (Ghost.reveal l2) pos2
))
))
: HST.Stack unit
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (contents_list p h sl pos pos') [] pos'
))
= let _ = list_fold_left_gen
p
j
sl
pos pos'
h0
l
inv
inv_frame
(fun _ -> False)
(fun _ _ -> ())
(fun pos1 pos2 ->
let h = HST.get () in
valid_list_cons p h sl pos1 pos';
valid_list_append p h sl pos pos1 pos';
body
pos1
pos2
(Ghost.hide (contents_list p h sl pos pos1))
(Ghost.hide (contents p h sl pos1))
(Ghost.hide (contents_list p h sl pos2 pos'))
;
true
)
in
()
inline_for_extraction
let list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v res == L.length (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
B.fresh_frame_modifies h0 h1;
let blen : BF.pointer U32.t = BF.alloca 0ul 1ul in
let h2 = HST.get () in
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer blen))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_buffer blen) h2 h /\
B.live h blen /\ (
let len = U32.v (Seq.index (B.as_seq h blen) 0) in
len <= U32.v pos1 /\ // necessary to prove that length computations do not overflow
len == L.length l1
))
(fun h l1 l2 pos1 h' ->
B.modifies_only_not_unused_in (B.loc_buffer blen) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
B.upd blen 0ul (B.index blen 0ul `U32.add` 1ul);
Classical.forall_intro_2 (list_length_append #t)
)
;
let len = B.index blen 0ul in
HST.pop_frame ();
len
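(* [list_length] instantiates the iterator with a single mutable counter;
   the invariant records that the counter equals the length of the visited
   prefix and remains bounded by the current position, which rules out
   overflow of the 32-bit counter. *)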
#push-options "--z3rlimit 32 --fuel 2 --ifuel 1"
inline_for_extraction
let list_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(f: (t -> Tot bool))
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
(x: Ghost.erased t) ->
HST.Stack bool
(requires (fun h -> valid_content p h sl pos (G.reveal x)))
(ensures (fun h res h' -> B.modifies B.loc_none h h' /\ res == f (G.reveal x)))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
(#rrel_out #rel_out: _)
(sl_out : slice rrel_out rel_out)
(pos_out : U32.t)
: HST.Stack U32.t
(requires (fun h ->
U32.v pos_out + U32.v pos' - U32.v pos <= U32.v sl_out.len /\
valid_list p h sl pos pos' /\
B.loc_disjoint (loc_slice_from_to sl pos pos') (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
live_slice h sl_out
))
(ensures (fun h pos_out' h' ->
B.modifies (loc_slice_from_to sl_out pos_out pos_out') h h' /\
U32.v pos_out' - U32.v pos_out <= U32.v pos' - U32.v pos /\
valid_list p h' sl_out pos_out pos_out' /\
contents_list p h' sl_out pos_out pos_out' == L.filter f (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
//B.fresh_frame_modifies h0 h1;
let bpos_out' : BF.pointer U32.t = BF.alloca pos_out 1ul in
let h2 = HST.get () in
let inv (h: HS.mem) (l1 l2: list t) (pos1: U32.t) : GTot Type0 =
B.live h bpos_out' /\ (
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
valid_list p h sl_out pos_out pos_out' /\
contents_list p h sl_out pos_out pos_out' == L.filter f l1 /\
U32.v pos_out' - U32.v pos1 <= U32.v pos_out - U32.v pos // necessary to prove that length computations do not overflow
)
in
valid_list_nil p h2 sl_out pos_out;
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))))
inv
(fun h l1 l2 pos1 h' ->
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies_only_not_unused_in (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
let pos_out1 = B.index bpos_out' 0ul in
list_filter_append f (G.reveal l1) [G.reveal x];
if f' sl pos1 x
then begin
assert (B.loc_includes (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) (loc_slice_from_to sl_out pos_out1 (pos_out1 `U32.add` (pos2 `U32.sub` pos1))));
let h = HST.get () in
writable_weaken sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (U32.v pos_out1) (U32.v pos_out1 + (U32.v pos2 - U32.v pos1));
let pos_out2 = copy_strong p sl pos1 pos2 sl_out pos_out1 in
B.upd bpos_out' 0ul pos_out2;
let h' = HST.get () in
writable_modifies sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (B.loc_region_only true (HS.get_tip h1)) h';
valid_list_nil p h' sl_out pos_out2;
valid_list_cons p h' sl_out pos_out1 pos_out2;
valid_list_append p h' sl_out pos_out pos_out1 pos_out2
end else
L.append_l_nil (L.filter f (G.reveal l1))
)
;
let pos_out' = B.index bpos_out' 0ul in
HST.pop_frame ();
pos_out'
#pop-options
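(* [list_filter] iterates over the input and uses [copy_strong] to append
   each element satisfying [f] to the output slice; the invariant states
   that the output written so far is exactly [L.filter f] applied to the
   visited prefix, and bounds the output position to prevent overflow. *)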
#push-options "--z3rlimit 64 --fuel 2 --ifuel 1"
inline_for_extraction
let list_nth
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(i: U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos' /\
U32.v i < L.length (contents_list p h sl pos pos')
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
valid_list p h sl pos res /\
valid p h sl res /\
valid_list p h sl (get_valid_pos p h sl res) pos' /\
L.length (contents_list p h sl pos res) == U32.v i /\
contents p h sl res == L.index (contents_list p h sl pos pos') (U32.v i)
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos1 = BF.alloca pos 1ul in
let bk = BF.alloca 0ul 1ul in
let h2 = HST.get () in
valid_list_nil p h0 sl pos;
let _ : bool = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
let k = Seq.index (B.as_seq h bk) 0 in
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bpos1 /\
B.live h bk /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v k /\
U32.v k <= U32.v i
)
(fun h _ _ _ h' ->
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bpos1);
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bk);
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
let pos1 = Seq.index (B.as_seq h bpos1) 0 in
B.live h bpos1 /\
valid p h0 sl pos1 /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl (get_valid_pos p h0 sl pos1) pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v i /\
contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i)
)
(fun _ _ ->
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 ->
let k = B.index bk 0ul in
if k = i
then begin
B.upd bpos1 0ul pos1;
valid_list_cons_recip p h0 sl pos1 pos';
list_index_append (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') (U32.v i);
valid_list_append p h0 sl pos pos1 pos' ;
assert (contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i));
false
end else begin
B.upd bk 0ul (k `U32.add` 1ul);
let h = HST.get () in
B.modifies_only_not_unused_in B.loc_none h0 h;
valid_list_snoc p h0 sl pos pos1;
assert (valid p h0 sl pos1);
assert (pos2 == get_valid_pos p h0 sl pos1);
assert (valid_list p h0 sl pos pos2);
list_length_append (contents_list p h0 sl pos pos1) [contents p h0 sl pos1];
true
end
)
in
let res = B.index bpos1 0ul in
HST.pop_frame ();
res
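(* [list_nth] walks the list with a counter and interrupts the traversal
   (the body returns [false]) once the counter reaches [i]; the interrupt
   postcondition then identifies the saved position as the one whose
   contents are [L.index (contents_list p h sl pos pos') (U32.v i)]. *)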
inline_for_extraction
let list_find
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let l = contents_list p h sl pos pos' in
if res = pos'
then L.find f l == None
else
U32.v pos <= U32.v res /\
valid p h sl res /\ (
let x = contents p h sl res in
U32.v res + content_length p h sl res <= U32.v pos' /\
f x == true /\
L.find f l == Some x
)
)))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bres = BF.alloca 0ul 1ul in
let h2 = HST.get () in
let not_found = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\
valid_list p h0 sl pos1 pos' /\
l2 == contents_list p h0 sl pos1 pos' /\
L.find f (contents_list p h0 sl pos pos') == L.find f l2
)
(fun h _ _ _ h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\ (
let res = Seq.index (B.as_seq h bres) 0 in
U32.v pos <= U32.v res /\
valid p h0 sl res /\ (
let x = contents p h0 sl res in
U32.v res + content_length p h0 sl res <= U32.v pos' /\
f x == true /\
L.find f (contents_list p h0 sl pos pos') == Some x
)))
(fun h h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun pos1 pos2 ->
if f' sl pos1
then begin
B.upd bres 0ul pos1;
false
end
else true
)
in
let res =
if not_found
then pos'
else B.index bres 0ul
in
HST.pop_frame ();
res
#pop-options
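(* [list_find] likewise interrupts the traversal at the first element
   satisfying [f]; if the traversal completes without interruption, the
   sentinel [pos'] is returned, which the specification equates with
   [L.find f l == None]. *)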
let rec list_existsb_find
(#a: Type)
(f: (a -> Tot bool))
(l: list a)
: Lemma | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val list_existsb_find (#a: Type) (f: (a -> Tot bool)) (l: list a)
: Lemma (L.existsb f l == Some? (L.find f l)) | [
"recursion"
] | LowParse.Low.Base.list_existsb_find | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | f: (_: a -> Prims.bool) -> l: Prims.list a
-> FStar.Pervasives.Lemma
(ensures FStar.List.Tot.Base.existsb f l == Some? (FStar.List.Tot.Base.find f l)) | {
"end_col": 30,
"end_line": 1706,
"start_col": 2,
"start_line": 1701
} |
Prims.GTot | val wvalid
(#t: Type)
(#k: parser_kind)
(p: parser k t)
(#rrel #rel: _)
(s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos': Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
: GTot prop | [
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "BF"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let wvalid
(#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos' : Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
: GTot prop
=
U32.v pos <= U32.v (Ghost.reveal gpos') /\
U32.v (Ghost.reveal gpos') <= U32.v s.len /\
U32.v s.len <= Seq.length x /\
parse p (Seq.slice x (U32.v pos) (U32.v s.len)) == Some (Ghost.reveal gv, U32.v (Ghost.reveal gpos') - U32.v pos) /\
compl pos (Ghost.reveal gv) (Ghost.reveal gpos') x | val wvalid
(#t: Type)
(#k: parser_kind)
(p: parser k t)
(#rrel #rel: _)
(s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos': Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
: GTot prop
let wvalid
(#t: Type)
(#k: parser_kind)
(p: parser k t)
(#rrel #rel: _)
(s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos': Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
: GTot prop = | false | null | false | U32.v pos <= U32.v (Ghost.reveal gpos') /\ U32.v (Ghost.reveal gpos') <= U32.v s.len /\
U32.v s.len <= Seq.length x /\
parse p (Seq.slice x (U32.v pos) (U32.v s.len)) ==
Some (Ghost.reveal gv, U32.v (Ghost.reveal gpos') - U32.v pos) /\
compl pos (Ghost.reveal gv) (Ghost.reveal gpos') x | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"sometrivial"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"LowParse.Low.Base.compl_t",
"FStar.UInt32.t",
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.UInt32.v",
"FStar.Ghost.reveal",
"LowParse.Slice.__proj__Mkslice__item__len",
"FStar.Seq.Base.length",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"FStar.Seq.Base.slice",
"LowParse.Spec.Base.parse",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Subtraction",
"Prims.prop"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
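(* An [accessor] is the executable counterpart of a [gaccessor]: given a
   position where [p1] is valid and the lens condition holds, it computes
   the position of the corresponding [p2] sub-representation without
   modifying memory. *)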
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
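(* A [validator] takes a position as a [U64.t] and returns either the
   position right after a valid representation of [p], or an error code
   (distinguished by [is_success] / [is_error]) when no valid
   representation starts at [pos]. *)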
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
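(* For a total parser of constant size, validation reduces to a bounds
   check: by [valid_total_constant_size], validity at [pos] is equivalent,
   on a live slice, to having at least [sz] bytes left, so no byte needs to
   be read. *)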
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
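(* Effectful single-cell update inside a writable range: only the updated cell
   is reported as modified, and writability of the enclosing range is preserved. *)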
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
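(* Leaf writers for single values: the weak variant checks for available space
   at run time and returns max_uint32 if the value does not fit, whereas the
   strong variant requires the caller to know that the serialized bytes fit. *)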
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
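(* A serializer32 writes the serialization of x directly into a raw byte buffer
   at pos and returns the number of bytes written; modifications are confined
   to the bytes actually written. *)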
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
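(* frame_serializer32 runs a serializer32 while reporting modifications on a
   caller-chosen enclosing range [posl, posr), and proves that the bytes of
   that range outside the freshly serialized area are left unchanged and that
   the range remains writable. *)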
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
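(* blit_strong is B.blit with its modifies clause restricted to the written
   sub-range of the destination buffer. *)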
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h'
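(* copy_strong copies the representation of a valid value from the source slice
   into the destination slice and re-establishes validity there; copy_strong'
   uses a jumper to find the source end position, and the copy_weak variants
   additionally check at run time that the destination is large enough,
   returning max_uint32 otherwise. *)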
#push-options "--z3rlimit 16"
inline_for_extraction
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos
))
= let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h0 (Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@inline_let] let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos'
#pop-options
inline_for_extraction
let copy_strong'
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\ (
let clen = content_length p h src spos in
U32.v dpos + clen <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + clen) h /\
B.loc_disjoint (loc_slice_from src spos) (loc_slice_from_to dst dpos (dpos `U32.add` (U32.uint_to_t clen)))
)))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
))
= let spos' = j src spos in
copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= if (dst.len `U32.sub` dpos) `U32.lt` (spos' `U32.sub` spos)
then max_uint32
else copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(jmp: jumper p)
(src: slice rrel1 rel1)
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (content_length p h src spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos (get_valid_pos p h src spos)) (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= let spos' = jmp src spos in
copy_weak_with_length p src spos spos' dst dpos
(* fold_left on lists *)
module BF = LowStar.Buffer
#push-options "--z3rlimit 256 --fuel 1 --ifuel 1"
#restart-solver
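(* list_fold_left_gen traverses a valid list stored in [pos, pos'), threading
   the invariant inv over the prefix already visited and the suffix still to
   visit; the body may interrupt the traversal early, in which case
   post_interrupt holds on exit. The loop is compiled to a C.Loops.while over
   stack-allocated cursors. *)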
inline_for_extraction
let list_fold_left_gen
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(post_interrupt: ((h: HS.mem) -> GTot Type0))
(post_interrupt_frame: (h: HS.mem) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
post_interrupt h
)) (ensures (post_interrupt h')))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
HST.Stack bool
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos1 /\
valid_pos p h0 sl pos1 pos2 /\
valid_list p h0 sl pos2 pos' /\
inv h (contents_list p h0 sl pos pos1) (contents p h0 sl pos1 :: contents_list p h0 sl pos2 pos') pos1
))
(ensures (fun h ctinue h' ->
B.modifies (Ghost.reveal l) h h' /\
(if ctinue then inv h' (contents_list p h0 sl pos pos1 `L.append` [contents p h0 sl pos1]) (contents_list p h0 sl pos2 pos') pos2 else post_interrupt h')
))
))
: HST.Stack bool
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h res h' ->
B.modifies (Ghost.reveal l) h h' /\
(if res then inv h' (contents_list p h sl pos pos') [] pos' else post_interrupt h')
))
= HST.push_frame ();
let h1 = HST.get () in
// B.fresh_frame_modifies h0 h1;
let bpos : BF.pointer U32.t = BF.alloca pos 1ul in
let bctinue : BF.pointer bool = BF.alloca true 1ul in
let btest: BF.pointer bool = BF.alloca (pos `U32.lt` pos') 1ul in
let h2 = HST.get () in
assert (B.modifies B.loc_none h0 h2);
let test_pre (h: HS.mem) : GTot Type0 =
B.live h bpos /\ B.live h bctinue /\ B.live h btest /\ (
let pos1 = Seq.index (B.as_seq h bpos) 0 in
let ctinue = Seq.index (B.as_seq h bctinue) 0 in
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
B.modifies (Ghost.reveal l `B.loc_union` B.loc_region_only true (HS.get_tip h1)) h2 h /\
Seq.index (B.as_seq h btest) 0 == ((U32.v (Seq.index (B.as_seq h bpos) 0) < U32.v pos') && Seq.index (B.as_seq h bctinue) 0) /\
(if ctinue then inv h (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 else post_interrupt h)
)
in
let test_post (cond: bool) (h: HS.mem) : GTot Type0 =
test_pre h /\
cond == Seq.index (B.as_seq h btest) 0
in
valid_list_nil p h0 sl pos;
inv_frame h0 [] (contents_list p h0 sl pos pos') pos h1;
inv_frame h1 [] (contents_list p h0 sl pos pos') pos h2;
[@inline_let]
let while_body () : HST.Stack unit
(requires (fun h -> test_post true h))
(ensures (fun _ _ h1 -> test_pre h1))
=
let h51 = HST.get () in
let pos1 = B.index bpos 0ul in
valid_list_cons_recip p h0 sl pos1 pos';
//assert (B.modifies (Ghost.reveal l `B.loc_union` B.loc_buffer bpos) h0 h51);
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos'));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos pos1));
valid_list_cons_recip p h51 sl pos1 pos';
let pos2 = j sl pos1 in
let h52 = HST.get () in
inv_frame h51 (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 h52;
B.modifies_only_not_unused_in (Ghost.reveal l) h0 h52;
let ctinue = body pos1 pos2 in
let h53 = HST.get () in
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos2 pos'));
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h1));
valid_pos_frame_strong p h0 sl pos1 pos2 (Ghost.reveal l) h53;
valid_list_snoc p h0 sl pos pos1;
B.upd bpos 0ul pos2;
B.upd bctinue 0ul ctinue;
B.upd btest 0ul ((pos2 `U32.lt` pos') && ctinue);
let h54 = HST.get () in
[@inline_let]
let _ =
if ctinue
then inv_frame h53 (contents_list p h0 sl pos pos2) (contents_list p h0 sl pos2 pos') pos2 h54
else post_interrupt_frame h53 h54
in
()
in
C.Loops.while
#test_pre
#test_post
(fun (_: unit) -> (
B.index btest 0ul) <: HST.Stack bool (requires (fun h -> test_pre h)) (ensures (fun h x h1 -> test_post x h1)))
while_body
;
valid_list_nil p h0 sl pos';
let res = B.index bctinue 0ul in
let h3 = HST.get () in
HST.pop_frame ();
let h4 = HST.get () in
//B.popped_modifies h3 h4;
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h3));
[@inline_let]
let _ =
if res
then inv_frame h3 (contents_list p h0 sl pos pos') [] pos' h4
else post_interrupt_frame h3 h4
in
res
#pop-options
//B.loc_includes_union_l (B.loc_all_regions_from false (HS.get_tip h1)) (Ghost.reveal l) (Ghost.reveal l)
//B.modifies_fresh_frame_popped h0 h1 (Ghost.reveal l) h3 h4
module G = FStar.Ghost
inline_for_extraction
let list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
(l1: Ghost.erased (list t)) ->
(x: Ghost.erased t) ->
(l2: Ghost.erased (list t)) ->
HST.Stack unit
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\
U32.v pos <= U32.v pos1 /\ U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' == Ghost.reveal l1 `L.append` (Ghost.reveal x :: Ghost.reveal l2)
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (Ghost.reveal l1 `L.append` [contents p h0 sl pos1]) (Ghost.reveal l2) pos2
))
))
: HST.Stack unit
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (contents_list p h sl pos pos') [] pos'
))
= let _ = list_fold_left_gen
p
j
sl
pos pos'
h0
l
inv
inv_frame
(fun _ -> False)
(fun _ _ -> ())
(fun pos1 pos2 ->
let h = HST.get () in
valid_list_cons p h sl pos1 pos';
valid_list_append p h sl pos pos1 pos';
body
pos1
pos2
(Ghost.hide (contents_list p h sl pos pos1))
(Ghost.hide (contents p h sl pos1))
(Ghost.hide (contents_list p h sl pos2 pos'))
;
true
)
in
()
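(* list_length counts the elements of a valid list by folding a stack-allocated
   counter over it. Given a jumper j for p, a typical call is simply
   [let n = list_length p j sl pos pos'] (illustrative use only). *)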
inline_for_extraction
let list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v res == L.length (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
B.fresh_frame_modifies h0 h1;
let blen : BF.pointer U32.t = BF.alloca 0ul 1ul in
let h2 = HST.get () in
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer blen))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_buffer blen) h2 h /\
B.live h blen /\ (
let len = U32.v (Seq.index (B.as_seq h blen) 0) in
len <= U32.v pos1 /\ // necessary to prove that length computations do not overflow
len == L.length l1
))
(fun h l1 l2 pos1 h' ->
B.modifies_only_not_unused_in (B.loc_buffer blen) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
B.upd blen 0ul (B.index blen 0ul `U32.add` 1ul);
Classical.forall_intro_2 (list_length_append #t)
)
;
let len = B.index blen 0ul in
HST.pop_frame ();
len
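(* list_filter copies, in order, the elements satisfying f into the output
   slice and returns the output end position; the output never exceeds the
   size of the input range. *)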
#push-options "--z3rlimit 32 --fuel 2 --ifuel 1"
inline_for_extraction
let list_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(f: (t -> Tot bool))
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
(x: Ghost.erased t) ->
HST.Stack bool
(requires (fun h -> valid_content p h sl pos (G.reveal x)))
(ensures (fun h res h' -> B.modifies B.loc_none h h' /\ res == f (G.reveal x)))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
(#rrel_out #rel_out: _)
(sl_out : slice rrel_out rel_out)
(pos_out : U32.t)
: HST.Stack U32.t
(requires (fun h ->
U32.v pos_out + U32.v pos' - U32.v pos <= U32.v sl_out.len /\
valid_list p h sl pos pos' /\
B.loc_disjoint (loc_slice_from_to sl pos pos') (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
live_slice h sl_out
))
(ensures (fun h pos_out' h' ->
B.modifies (loc_slice_from_to sl_out pos_out pos_out') h h' /\
U32.v pos_out' - U32.v pos_out <= U32.v pos' - U32.v pos /\
valid_list p h' sl_out pos_out pos_out' /\
contents_list p h' sl_out pos_out pos_out' == L.filter f (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
//B.fresh_frame_modifies h0 h1;
let bpos_out' : BF.pointer U32.t = BF.alloca pos_out 1ul in
let h2 = HST.get () in
let inv (h: HS.mem) (l1 l2: list t) (pos1: U32.t) : GTot Type0 =
B.live h bpos_out' /\ (
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
valid_list p h sl_out pos_out pos_out' /\
contents_list p h sl_out pos_out pos_out' == L.filter f l1 /\
U32.v pos_out' - U32.v pos1 <= U32.v pos_out - U32.v pos // necessary to prove that length computations do not overflow
)
in
valid_list_nil p h2 sl_out pos_out;
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))))
inv
(fun h l1 l2 pos1 h' ->
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies_only_not_unused_in (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
let pos_out1 = B.index bpos_out' 0ul in
list_filter_append f (G.reveal l1) [G.reveal x];
if f' sl pos1 x
then begin
assert (B.loc_includes (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) (loc_slice_from_to sl_out pos_out1 (pos_out1 `U32.add` (pos2 `U32.sub` pos1))));
let h = HST.get () in
writable_weaken sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (U32.v pos_out1) (U32.v pos_out1 + (U32.v pos2 - U32.v pos1));
let pos_out2 = copy_strong p sl pos1 pos2 sl_out pos_out1 in
B.upd bpos_out' 0ul pos_out2;
let h' = HST.get () in
writable_modifies sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (B.loc_region_only true (HS.get_tip h1)) h';
valid_list_nil p h' sl_out pos_out2;
valid_list_cons p h' sl_out pos_out1 pos_out2;
valid_list_append p h' sl_out pos_out pos_out1 pos_out2
end else
L.append_l_nil (L.filter f (G.reveal l1))
)
;
let pos_out' = B.index bpos_out' 0ul in
HST.pop_frame ();
pos_out'
#pop-options
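(* list_nth returns the position of the i-th element of a valid list;
   list_find returns the position of the first element satisfying f,
   or pos' if no element does. *)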
#push-options "--z3rlimit 64 --fuel 2 --ifuel 1"
inline_for_extraction
let list_nth
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(i: U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos' /\
U32.v i < L.length (contents_list p h sl pos pos')
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
valid_list p h sl pos res /\
valid p h sl res /\
valid_list p h sl (get_valid_pos p h sl res) pos' /\
L.length (contents_list p h sl pos res) == U32.v i /\
contents p h sl res == L.index (contents_list p h sl pos pos') (U32.v i)
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos1 = BF.alloca pos 1ul in
let bk = BF.alloca 0ul 1ul in
let h2 = HST.get () in
valid_list_nil p h0 sl pos;
let _ : bool = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
let k = Seq.index (B.as_seq h bk) 0 in
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bpos1 /\
B.live h bk /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v k /\
U32.v k <= U32.v i
)
(fun h _ _ _ h' ->
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bpos1);
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bk);
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
let pos1 = Seq.index (B.as_seq h bpos1) 0 in
B.live h bpos1 /\
valid p h0 sl pos1 /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl (get_valid_pos p h0 sl pos1) pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v i /\
contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i)
)
(fun _ _ ->
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 ->
let k = B.index bk 0ul in
if k = i
then begin
B.upd bpos1 0ul pos1;
valid_list_cons_recip p h0 sl pos1 pos';
list_index_append (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') (U32.v i);
valid_list_append p h0 sl pos pos1 pos' ;
assert (contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i));
false
end else begin
B.upd bk 0ul (k `U32.add` 1ul);
let h = HST.get () in
B.modifies_only_not_unused_in B.loc_none h0 h;
valid_list_snoc p h0 sl pos pos1;
assert (valid p h0 sl pos1);
assert (pos2 == get_valid_pos p h0 sl pos1);
assert (valid_list p h0 sl pos pos2);
list_length_append (contents_list p h0 sl pos pos1) [contents p h0 sl pos1];
true
end
)
in
let res = B.index bpos1 0ul in
HST.pop_frame ();
res
inline_for_extraction
let list_find
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let l = contents_list p h sl pos pos' in
if res = pos'
then L.find f l == None
else
U32.v pos <= U32.v res /\
valid p h sl res /\ (
let x = contents p h sl res in
U32.v res + content_length p h sl res <= U32.v pos' /\
f x == true /\
L.find f l == Some x
)
)))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bres = BF.alloca 0ul 1ul in
let h2 = HST.get () in
let not_found = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\
valid_list p h0 sl pos1 pos' /\
l2 == contents_list p h0 sl pos1 pos' /\
L.find f (contents_list p h0 sl pos pos') == L.find f l2
)
(fun h _ _ _ h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\ (
let res = Seq.index (B.as_seq h bres) 0 in
U32.v pos <= U32.v res /\
valid p h0 sl res /\ (
let x = contents p h0 sl res in
U32.v res + content_length p h0 sl res <= U32.v pos' /\
f x == true /\
L.find f (contents_list p h0 sl pos pos') == Some x
)))
(fun h h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun pos1 pos2 ->
if f' sl pos1
then begin
B.upd bres 0ul pos1;
false
end
else true
)
in
let res =
if not_found
then pos'
else B.index bres 0ul
in
HST.pop_frame ();
res
#pop-options
let rec list_existsb_find
(#a: Type)
(f: (a -> Tot bool))
(l: list a)
: Lemma
(L.existsb f l == Some? (L.find f l))
= match l with
| [] -> ()
| x :: q ->
if f x
then ()
else list_existsb_find f q
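(* list_existsb decides L.existsb by running list_find and comparing its
   result against pos'. *)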
inline_for_extraction
noextract
let list_existsb
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack bool
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == L.existsb f (contents_list p h sl pos pos')
))
= let h = HST.get () in
list_existsb_find f (contents_list p h sl pos pos');
let posn = list_find j f f' sl pos pos' in
posn <> pos'
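(* list_flatten_map serializes the concatenation of f applied to each element
   of the input list into the output slice, returning max_uint32 if the result
   does not fit; list_map below is the special case where f produces exactly
   one element per input. *)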
#push-options "--fuel 2 --ifuel 1 --z3rlimit 256 --query_stats"
inline_for_extraction
noextract
let list_flatten_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot (list t2))) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2_ res /\
contents_list p2 h' sl2 pos2_ res == y
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1')) in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= let hz = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos2_ = BF.alloca pos2 1ul in
let h2 = HST.get () in
valid_list_nil p2 hz sl2 pos2;
let fits = list_fold_left_gen
p1
j1
sl1
pos1 pos1'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2))
(fun h ll lr _ ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
B.live h bpos2_ /\ (
let pos2_ = Seq.index (B.as_seq h bpos2_) 0 in
contents_list p1 h0 sl1 pos1 pos1' == ll `List.Tot.append` lr /\
valid_list p2 h sl2 pos2 pos2_ /\
contents_list p2 h sl2 pos2 pos2_ == List.Tot.flatten (List.Tot.map f ll) /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(fun h _ _ _ h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
U32.v pos2 + serialized_list_length s2 (List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1'))) > U32.v sl2.len
)
(fun h h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1l pos1r ->
let pos2_ = B.index bpos2_ 0ul in
let h = HST.get () in
writable_weaken sl2.base (U32.v pos2) (U32.v sl2.len) h (U32.v pos2_) (U32.v sl2.len);
valid_pos_frame_strong p1 h0 sl1 pos1l pos1r (loc_slice_from sl2 pos2) hz;
let res = f' pos1l pos2_ in
let fits = not (res = max_uint32) in
if fits then begin
B.upd bpos2_ 0ul res;
let h' = HST.get () in
writable_modifies sl2.base (U32.v pos2) (U32.v sl2.len) h (B.loc_region_only true (HS.get_tip h1)) h' ;
List.Tot.append_assoc (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l] (contents_list p1 h0 sl1 pos1r pos1');
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l];
valid_list_snoc p1 h0 sl1 pos1 pos1l;
valid_list_append p2 h' sl2 pos2 pos2_ res;
valid_list_nil p2 h' sl2 res;
valid_list_append p2 h' sl2 pos2_ res res
end else begin
let h' = HST.get () in
valid_list_cons p1 h0 sl1 pos1l pos1' ;
valid_list_append p1 h0 sl1 pos1 pos1l pos1' ;
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) (contents_list p1 h0 sl1 pos1l pos1');
serialized_list_length_append s2 (L.flatten (L.map f (contents_list p1 h0 sl1 pos1 pos1l))) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1l pos1')));
serialized_list_length_append s2 (f (contents p1 h0 sl1 pos1l)) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1r pos1')));
valid_list_serialized_list_length s2 h' sl2 pos2 pos2_
end;
fits
)
in
let res =
if fits
then B.index bpos2_ 0ul
else max_uint32
in
HST.pop_frame ();
res
#pop-options
#push-options "--z3rlimit 16 --query_stats"
inline_for_extraction
noextract
let list_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot t2)) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_length s2 y > U32.v sl2.len
else
valid_content_pos p2 h' sl2 pos2_ y res
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1') in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= list_map_list_flatten_map f (contents_list p1 h0 sl1 pos1 pos1');
list_flatten_map
j1
s2
(fun x -> [f x])
h0
sl1 pos1 pos1'
sl2 pos2
(fun pos1 pos2 ->
let res = f' pos1 pos2 in
let h = HST.get () in
if res = max_uint32
then begin
serialized_list_length_nil s2;
serialized_list_length_cons s2 (f (contents p1 h0 sl1 pos1)) []
end
else begin
valid_list_nil p2 h sl2 res;
valid_list_cons p2 h sl2 pos2 res
end;
res
)
(* Example: trivial printers *)
inline_for_extraction
let print_list
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(print: ((#rrel: _) -> (#rel: _) -> (sl: slice rrel rel) -> (pos: U32.t) -> HST.Stack unit (requires (fun h -> valid p h sl pos)) (ensures (fun h _ h' -> B.modifies B.loc_none h h'))))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack unit
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h _ h' ->
B.modifies B.loc_none h h'
))
= let h0 = HST.get () in
list_fold_left
p
j
sl
pos pos'
h0
(Ghost.hide B.loc_none)
(fun _ _ _ _ -> True)
(fun _ _ _ _ _ -> ())
(fun pos1 _ _ _ _ ->
print sl pos1
)
(* Monotonicity *)
inline_for_extraction
let compl_t (t: Type) = U32.t -> t -> U32.t -> Tot (B.spred byte)
let wvalid
(#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos' : Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
: GTot prop | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val wvalid
(#t: Type)
(#k: parser_kind)
(p: parser k t)
(#rrel #rel: _)
(s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos': Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
: GTot prop | [] | LowParse.Low.Base.wvalid | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
p: LowParse.Spec.Base.parser k t ->
s: LowParse.Slice.slice rrel rel ->
compl: LowParse.Low.Base.compl_t t ->
pos: FStar.UInt32.t ->
gpos': FStar.Ghost.erased FStar.UInt32.t ->
gv: FStar.Ghost.erased t ->
x: FStar.Seq.Base.seq LowParse.Bytes.byte
-> Prims.GTot Prims.prop | {
"end_col": 52,
"end_line": 2023,
"start_col": 2,
"start_line": 2019
} |
Prims.Tot | val serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes). parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2)) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos | val serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes). parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes). parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2)) = | false | null | false | fun x #rrel #rel b pos -> s1' x b pos | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"LowParse.Low.Base.serializer32",
"Prims.squash",
"Prims.l_and",
"Prims.eq2",
"Prims.l_Forall",
"LowParse.Bytes.bytes",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowStar.Monotonic.Buffer.srel",
"LowParse.Bytes.byte",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"LowParse.Spec.Base.serialize_ext"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
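(* An accessor is the effectful counterpart of a gaccessor: at a position where
   p1 is valid and the lens condition holds, it computes the position of the
   accessed sub-value without modifying memory. *)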
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
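(* A validator checks whether the input slice holds a valid representation at
   the given position, returning the end position on success and an error code
   otherwise; validator_no_read only needs the slice length, not its contents,
   at run time. *)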
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
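(* A jumper computes the end position of a value already known to be valid,
   without re-validating it. *)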
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
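(* seq_starts_with slong sshort holds when sshort is a prefix of slong; it is
   the precondition under which jump_serializer below can skip over the
   serialization of a known value. *)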
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
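(* A leaf_reader returns the value parsed at a valid position, without
   modifying memory. *)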
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
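(* writable b pos pos' h states that, in state h, the sub-range [pos, pos') of
   b may be overwritten with arbitrary contents while respecting the buffer's
   preorder rel; the lemmas that follow introduce, weaken and preserve this
   predicate. *)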
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // needed to establish B.live (B.g_upd_seq b s' h) b
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
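(* Note (editorial): [writable_modifies] transports writability across a state
   change: if the modification is confined to the range [pos, pos') of [b]
   together with a location [l] disjoint from [b], the range stays writable in
   the new state. The proof recovers the untouched prefix [0, pos) and suffix
   [pos', len b) with [B.modifies_buffer_from_to_elim] and concludes with
   [writable_replace_subseq]. *)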
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
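(* Note (editorial): [mbuffer_upd] is the stateful single-cell update used by
   the writers below: it updates index [i] inside the (ghost) writable range
   [pos, pos'), narrows its modifies clause to the one-cell range [i, i+1),
   and preserves writability of the whole range via [writable_upd]. *)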
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
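(* Note (editorial): a weak leaf writer checks for space at run time. If fewer
   than [serialized_length s x] bytes remain after [pos] it fails by returning
   [max_uint32]; otherwise it writes [x] and returns the new position. Its
   modifies clause covers the whole tail of the slice from [pos]. *)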
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
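(* Note (editorial): a strong leaf writer assumes the caller has already
   ensured that [serialized_length s x] bytes fit at [pos]; it cannot fail,
   and its modifies clause is restricted to the exact range it fills. *)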
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
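(* Note (editorial): [serializer32] is the raw-buffer serializer interface: it
   writes [serialize s x] into [b] at [pos] (which must be writable for the
   serialized length) and returns the number of bytes written, rather than a
   position within a slice as the leaf writers above do. *)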
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes). parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2)) | [] | LowParse.Low.Base.serialize32_ext | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
p1: LowParse.Spec.Base.parser k1 t1 ->
s1: LowParse.Spec.Base.serializer p1 ->
s1': LowParse.Low.Base.serializer32 s1 ->
p2: LowParse.Spec.Base.parser k2 t2 ->
u284:
Prims.squash (t1 == t2 /\
(forall (input: LowParse.Bytes.bytes).
LowParse.Spec.Base.parse p1 input == LowParse.Spec.Base.parse p2 input))
-> LowParse.Low.Base.serializer32 (LowParse.Spec.Base.serialize_ext p1 s1 p2) | {
"end_col": 39,
"end_line": 858,
"start_col": 2,
"start_line": 858
} |
Prims.Tot | val make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f:
(input: Ghost.erased bytes
-> Pure U32.t
(requires
(Seq.length (Ghost.reveal input) < 4294967296 /\
gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))))
: Tot (accessor g) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos)) | val make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f:
(input: Ghost.erased bytes
-> Pure U32.t
(requires
(Seq.length (Ghost.reveal input) < 4294967296 /\
gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))))
: Tot (accessor g)
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f:
(input: Ghost.erased bytes
-> Pure U32.t
(requires
(Seq.length (Ghost.reveal input) < 4294967296 /\
gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))))
: Tot (accessor g) = | false | null | false | fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@@ inline_let ]let _ = slice_access_eq h g sl pos in
pos `U32.add` (f (Ghost.hide (bytes_of_slice_from h sl pos))) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Low.Base.Spec.clens",
"LowParse.Low.Base.Spec.gaccessor",
"FStar.Ghost.erased",
"LowParse.Bytes.bytes",
"FStar.UInt32.t",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Ghost.reveal",
"LowParse.Low.Base.Spec.gaccessor_pre",
"Prims.eq2",
"Prims.int",
"Prims.l_or",
"FStar.UInt.size",
"FStar.UInt32.n",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt32.v",
"LowParse.Slice.srel",
"LowParse.Slice.slice",
"FStar.UInt32.add",
"FStar.Ghost.hide",
"LowParse.Slice.bytes_of_slice_from",
"Prims.unit",
"LowParse.Low.Base.Spec.slice_access_eq",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowParse.Low.Base.accessor"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
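(* Note (editorial): an [accessor g] is the executable counterpart of the
   ghost accessor [g]: given a position where [p1] is valid and whose contents
   satisfy the lens condition [cl.clens_cond], it returns the position of the
   [p2]-subfield, i.e. [slice_access h g sl pos], without modifying memory. *)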
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
)) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f:
(input: Ghost.erased bytes
-> Pure U32.t
(requires
(Seq.length (Ghost.reveal input) < 4294967296 /\
gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))))
: Tot (accessor g) | [] | LowParse.Low.Base.make_accessor_from_pure | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
$g: LowParse.Low.Base.Spec.gaccessor p1 p2 cl ->
f: (input: FStar.Ghost.erased LowParse.Bytes.bytes -> Prims.Pure FStar.UInt32.t)
-> LowParse.Low.Base.accessor g | {
"end_col": 61,
"end_line": 65,
"start_col": 2,
"start_line": 60
} |
FStar.HyperStack.ST.Stack | val mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires
(fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\ Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\ Ghost.reveal pos' <= B.length b))
(ensures
(fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v)) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v | val mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires
(fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\ Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\ Ghost.reveal pos' <= B.length b))
(ensures
(fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v))
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires
(fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\ Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\ Ghost.reveal pos' <= B.length b))
(ensures
(fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v)) = | true | null | false | let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Ghost.erased",
"Prims.nat",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.upd'",
"Prims.unit",
"LowStar.Monotonic.Buffer.g_upd_seq_as_seq",
"FStar.Seq.Base.upd",
"LowStar.Monotonic.Buffer.as_seq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.g_upd_modifies_strong",
"LowParse.Low.Base.writable_upd",
"FStar.Ghost.reveal",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"Prims.l_and",
"LowParse.Low.Base.writable",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"LowStar.Monotonic.Buffer.length",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_buffer_from_to",
"FStar.UInt32.add",
"FStar.UInt32.__uint_to_t",
"Prims.eq2",
"FStar.Seq.Base.seq"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
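(* Note (editorial): a [validator p] decides, without modifying memory,
   whether [p] parses successfully at [pos]. On success the result, converted
   back to 32 bits, is the position one past the parsed value; otherwise it is
   an error code rejected by [is_success]. Positions are carried as [U64.t] to
   leave room for error codes. *)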
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
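(* Note (editorial): [validate] wraps a validator into a boolean check over a
   raw buffer: it first rules out lengths that cannot be represented as a
   success position, then runs the validator on [make_slice b len] from
   position [0ul] and tests the result with [is_success]. *)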
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
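(* Note (editorial): a [jumper p] is a validator restricted to inputs already
   known to be valid: given a position where [p] is valid, it returns the
   position one past the parsed value, i.e. [pos + content_length p h sl pos],
   without modifying memory. *)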
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
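(* Note (editorial): [slong `seq_starts_with` sshort] holds when [sshort] is a
   prefix of [slong]. The lemmas that follow (transitivity and interaction
   with [Seq.append]) support reasoning about slices whose remaining bytes
   begin with a serialized value, as in [jump_serializer] below. *)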
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires
(fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\ Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\ Ghost.reveal pos' <= B.length b))
(ensures
(fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v)) | [] | LowParse.Low.Base.mbuffer_upd | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
b: LowStar.Monotonic.Buffer.mbuffer t rrel rel ->
pos: FStar.Ghost.erased Prims.nat ->
pos': FStar.Ghost.erased Prims.nat ->
i: FStar.UInt32.t ->
v: t
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 14,
"end_line": 766,
"start_col": 1,
"start_line": 762
} |
Prims.Tot | val leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u:
squash (k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32))
: Tot (leaf_writer_weak s) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end | val leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u:
squash (k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32))
: Tot (leaf_writer_weak s)
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u:
squash (k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32))
: Tot (leaf_writer_weak s) = | false | null | false | fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"LowParse.Low.Base.leaf_writer_strong",
"FStar.UInt32.t",
"Prims.squash",
"Prims.l_and",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"Prims.nat",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low",
"Prims.int",
"Prims.l_or",
"Prims.b2t",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt.size",
"FStar.UInt32.n",
"FStar.UInt32.v",
"Prims.op_LessThan",
"LowParse.Low.ErrorCode.max_uint32",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt32.lt",
"FStar.UInt32.sub",
"LowParse.Slice.__proj__Mkslice__item__len",
"Prims.bool",
"Prims.unit",
"LowParse.Low.Base.writable_weaken",
"LowParse.Slice.buffer_srel_of_srel",
"LowParse.Slice.__proj__Mkslice__item__base",
"Prims.op_Addition",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowParse.Low.Base.leaf_writer_weak"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
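(* Hedged usage sketch (hypothetical parser [parse_example] with a
   constant-size kind of 4 bytes, not defined in this module):

     inline_for_extraction
     let jump_example : jumper parse_example =
       jump_constant_size parse_example 4ul ()
*)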
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
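(* [seq_starts_with slong sshort] below states that [sshort] is a prefix of
   [slong]; [jump_serializer] uses it to require that the serialization of a
   ghost value is a prefix of the bytes remaining in the slice, which is
   enough to jump over that value. *)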
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
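(* [writable b pos pos' h] states that the preorder [rel] of [b] places no
   constraint on the contents of [pos .. pos'): replacing that subsequence
   with any two candidate contents yields [rel]-related sequences, so the
   range may be overwritten freely. *)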
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
  B.g_upd_seq_as_seq b s' h; // establishes B.live h' b, needed by writable_replace_subseq below
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
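(* The lemmas above carry [writable] through the situations a writer meets:
   shrinking to a sub-range, replacing the designated subsequence, moving to
   a heap where the buffer holds the same contents, a single-point update,
   and any modifies clause confined to the range together with locations
   disjoint from the buffer. *)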
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
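(* Weak vs. strong leaf writers: a weak writer checks the available space at
   run time and signals overflow by returning [max_uint32], whereas a strong
   writer requires the caller to have proved the space bound and always
   succeeds. *)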
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
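(* A [serializer32] writes into a raw byte buffer and returns the number of
   bytes written; it does not restate validity of the result.
   [leaf_writer_strong_of_serializer32] below recovers validity from the
   written bytes via [parse_strong_prefix]. *)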
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
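(* [frame_serializer32] wraps [s32] with framing: within [posl .. posr), the
   bytes before [pos] and after [pos + len] are unchanged and writability of
   the enclosing range is preserved, so successive fields can be serialized
   into the same window. *)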
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
)) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u:
squash (k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32))
: Tot (leaf_writer_weak s) | [] | LowParse.Low.Base.leaf_writer_weak_of_strong_constant_size | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
s32: LowParse.Low.Base.leaf_writer_strong s ->
sz: FStar.UInt32.t ->
u307:
Prims.squash (Mkparser_kind'?.parser_kind_high k ==
FStar.Pervasives.Native.Some (Mkparser_kind'?.parser_kind_low k) /\
Mkparser_kind'?.parser_kind_low k == FStar.UInt32.v sz /\
Mkparser_kind'?.parser_kind_low k < FStar.UInt32.v LowParse.Low.ErrorCode.max_uint32)
-> LowParse.Low.Base.leaf_writer_weak s | {
"end_col": 5,
"end_line": 954,
"start_col": 2,
"start_line": 947
} |
FStar.HyperStack.ST.Stack | val recall_valid_gen
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(i: irepr p s compl)
: HST.Stack unit
(requires (fun h -> B.recallable s.base \/ live_slice h s))
(ensures
(fun h _ h' ->
h' == h /\ live_slice h s /\
valid_content_pos p h s (irepr_pos i) (irepr_v i) (irepr_pos' i) /\
compl (irepr_pos i) (irepr_v i) (irepr_pos' i) (B.as_seq h s.base))) | [
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "BF"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let recall_valid_gen
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(i: irepr p s compl)
: HST.Stack unit
(requires (fun h -> B.recallable s.base \/ live_slice h s))
(ensures (fun h _ h' ->
h' == h /\
live_slice h s /\
valid_content_pos p h s (irepr_pos i) (irepr_v i) (irepr_pos' i) /\
compl (irepr_pos i) (irepr_v i) (irepr_pos' i) (B.as_seq h s.base)
))
= let h = HST.get () in
B.recall_p s.base (wvalid p s compl (irepr_pos i) (IRepr?.gpos' i) (IRepr?.gv i));
wvalid_valid_content_pos p s compl (irepr_pos i) (IRepr?.gpos' i) (IRepr?.gv i) (B.as_seq h s.base) h | val recall_valid_gen
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(i: irepr p s compl)
: HST.Stack unit
(requires (fun h -> B.recallable s.base \/ live_slice h s))
(ensures
(fun h _ h' ->
h' == h /\ live_slice h s /\
valid_content_pos p h s (irepr_pos i) (irepr_v i) (irepr_pos' i) /\
compl (irepr_pos i) (irepr_v i) (irepr_pos' i) (B.as_seq h s.base)))
let recall_valid_gen
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(i: irepr p s compl)
: HST.Stack unit
(requires (fun h -> B.recallable s.base \/ live_slice h s))
(ensures
(fun h _ h' ->
h' == h /\ live_slice h s /\
valid_content_pos p h s (irepr_pos i) (irepr_v i) (irepr_pos' i) /\
compl (irepr_pos i) (irepr_v i) (irepr_pos' i) (B.as_seq h s.base))) = | true | null | false | let h = HST.get () in
B.recall_p s.base (wvalid p s compl (irepr_pos i) (IRepr?.gpos' i) (IRepr?.gv i));
wvalid_valid_content_pos p
s
compl
(irepr_pos i)
(IRepr?.gpos' i)
(IRepr?.gv i)
(B.as_seq h s.base)
h | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"LowParse.Low.Base.compl_t",
"LowParse.Low.Base.irepr",
"LowParse.Low.Base.wvalid_valid_content_pos",
"LowParse.Low.Base.irepr_pos",
"LowParse.Low.Base.__proj__IRepr__item__gpos'",
"LowParse.Low.Base.__proj__IRepr__item__gv",
"LowStar.Monotonic.Buffer.as_seq",
"LowParse.Slice.buffer_srel_of_srel",
"LowParse.Slice.__proj__Mkslice__item__base",
"Prims.unit",
"LowStar.Monotonic.Buffer.recall_p",
"LowParse.Low.Base.wvalid",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"Prims.l_or",
"LowStar.Monotonic.Buffer.recallable",
"LowParse.Slice.live_slice",
"Prims.l_and",
"Prims.eq2",
"LowParse.Low.Base.Spec.valid_content_pos",
"LowParse.Low.Base.irepr_v",
"LowParse.Low.Base.irepr_pos'"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
  B.g_upd_seq_as_seq b s' h; // establishes B.live h' b, needed by writable_replace_subseq below
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h'
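(* [blit_strong] is [B.blit] with its modifies clause tightened, via
   [B.modifies_loc_buffer_from_to_intro], to the destination range actually
   written. *)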
#push-options "--z3rlimit 16"
inline_for_extraction
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos
))
= let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h0 (Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@inline_let] let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos'
#pop-options
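(* [copy_strong] copies the already-validated bytes of a value from [src] to
   [dst] and transports validity to the destination with
   [parse_strong_prefix]; the [copy_weak*] variants below add a run-time
   space check and return [max_uint32] on overflow. *)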
inline_for_extraction
let copy_strong'
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\ (
let clen = content_length p h src spos in
U32.v dpos + clen <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + clen) h /\
B.loc_disjoint (loc_slice_from src spos) (loc_slice_from_to dst dpos (dpos `U32.add` (U32.uint_to_t clen)))
)))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
))
= let spos' = j src spos in
copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= if (dst.len `U32.sub` dpos) `U32.lt` (spos' `U32.sub` spos)
then max_uint32
else copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(jmp: jumper p)
(src: slice rrel1 rel1)
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (content_length p h src spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos (get_valid_pos p h src spos)) (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= let spos' = jmp src spos in
copy_weak_with_length p src spos spos' dst dpos
(* fold_left on lists *)
module BF = LowStar.Buffer
#push-options "--z3rlimit 256 --fuel 1 --ifuel 1"
#restart-solver
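(* [list_fold_left_gen] folds [body] over the consecutive values parsed
   between [pos] and [pos'], maintaining the user invariant [inv]; [body] may
   interrupt the loop early, in which case [post_interrupt] holds instead.
   The implementation drives [C.Loops.while] with three stack-allocated
   cells: the current position, the continue flag, and the loop condition. *)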
inline_for_extraction
let list_fold_left_gen
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(post_interrupt: ((h: HS.mem) -> GTot Type0))
(post_interrupt_frame: (h: HS.mem) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
post_interrupt h
)) (ensures (post_interrupt h')))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
HST.Stack bool
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos1 /\
valid_pos p h0 sl pos1 pos2 /\
valid_list p h0 sl pos2 pos' /\
inv h (contents_list p h0 sl pos pos1) (contents p h0 sl pos1 :: contents_list p h0 sl pos2 pos') pos1
))
(ensures (fun h ctinue h' ->
B.modifies (Ghost.reveal l) h h' /\
(if ctinue then inv h' (contents_list p h0 sl pos pos1 `L.append` [contents p h0 sl pos1]) (contents_list p h0 sl pos2 pos') pos2 else post_interrupt h')
))
))
: HST.Stack bool
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h res h' ->
B.modifies (Ghost.reveal l) h h' /\
(if res then inv h' (contents_list p h sl pos pos') [] pos' else post_interrupt h')
))
= HST.push_frame ();
let h1 = HST.get () in
// B.fresh_frame_modifies h0 h1;
let bpos : BF.pointer U32.t = BF.alloca pos 1ul in
let bctinue : BF.pointer bool = BF.alloca true 1ul in
let btest: BF.pointer bool = BF.alloca (pos `U32.lt` pos') 1ul in
let h2 = HST.get () in
assert (B.modifies B.loc_none h0 h2);
let test_pre (h: HS.mem) : GTot Type0 =
B.live h bpos /\ B.live h bctinue /\ B.live h btest /\ (
let pos1 = Seq.index (B.as_seq h bpos) 0 in
let ctinue = Seq.index (B.as_seq h bctinue) 0 in
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
B.modifies (Ghost.reveal l `B.loc_union` B.loc_region_only true (HS.get_tip h1)) h2 h /\
Seq.index (B.as_seq h btest) 0 == ((U32.v (Seq.index (B.as_seq h bpos) 0) < U32.v pos') && Seq.index (B.as_seq h bctinue) 0) /\
(if ctinue then inv h (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 else post_interrupt h)
)
in
let test_post (cond: bool) (h: HS.mem) : GTot Type0 =
test_pre h /\
cond == Seq.index (B.as_seq h btest) 0
in
valid_list_nil p h0 sl pos;
inv_frame h0 [] (contents_list p h0 sl pos pos') pos h1;
inv_frame h1 [] (contents_list p h0 sl pos pos') pos h2;
[@inline_let]
let while_body () : HST.Stack unit
(requires (fun h -> test_post true h))
(ensures (fun _ _ h1 -> test_pre h1))
=
let h51 = HST.get () in
let pos1 = B.index bpos 0ul in
valid_list_cons_recip p h0 sl pos1 pos';
//assert (B.modifies (Ghost.reveal l `B.loc_union` B.loc_buffer bpos) h0 h51);
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos'));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos pos1));
valid_list_cons_recip p h51 sl pos1 pos';
let pos2 = j sl pos1 in
let h52 = HST.get () in
inv_frame h51 (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 h52;
B.modifies_only_not_unused_in (Ghost.reveal l) h0 h52;
let ctinue = body pos1 pos2 in
let h53 = HST.get () in
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos2 pos'));
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h1));
valid_pos_frame_strong p h0 sl pos1 pos2 (Ghost.reveal l) h53;
valid_list_snoc p h0 sl pos pos1;
B.upd bpos 0ul pos2;
B.upd bctinue 0ul ctinue;
B.upd btest 0ul ((pos2 `U32.lt` pos') && ctinue);
let h54 = HST.get () in
[@inline_let]
let _ =
if ctinue
then inv_frame h53 (contents_list p h0 sl pos pos2) (contents_list p h0 sl pos2 pos') pos2 h54
else post_interrupt_frame h53 h54
in
()
in
C.Loops.while
#test_pre
#test_post
(fun (_: unit) -> (
B.index btest 0ul) <: HST.Stack bool (requires (fun h -> test_pre h)) (ensures (fun h x h1 -> test_post x h1)))
while_body
;
valid_list_nil p h0 sl pos';
let res = B.index bctinue 0ul in
let h3 = HST.get () in
HST.pop_frame ();
let h4 = HST.get () in
//B.popped_modifies h3 h4;
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h3));
[@inline_let]
let _ =
if res
then inv_frame h3 (contents_list p h0 sl pos pos') [] pos' h4
else post_interrupt_frame h3 h4
in
res
#pop-options
//B.loc_includes_union_l (B.loc_all_regions_from false (HS.get_tip h1)) (Ghost.reveal l) (Ghost.reveal l)
//B.modifies_fresh_frame_popped h0 h1 (Ghost.reveal l) h3 h4
module G = FStar.Ghost
inline_for_extraction
let list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
(l1: Ghost.erased (list t)) ->
(x: Ghost.erased t) ->
(l2: Ghost.erased (list t)) ->
HST.Stack unit
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\
U32.v pos <= U32.v pos1 /\ U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' == Ghost.reveal l1 `L.append` (Ghost.reveal x :: Ghost.reveal l2)
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (Ghost.reveal l1 `L.append` [contents p h0 sl pos1]) (Ghost.reveal l2) pos2
))
))
: HST.Stack unit
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (contents_list p h sl pos pos') [] pos'
))
= let _ = list_fold_left_gen
p
j
sl
pos pos'
h0
l
inv
inv_frame
(fun _ -> False)
(fun _ _ -> ())
(fun pos1 pos2 ->
let h = HST.get () in
valid_list_cons p h sl pos1 pos';
valid_list_append p h sl pos pos1 pos';
body
pos1
pos2
(Ghost.hide (contents_list p h sl pos pos1))
(Ghost.hide (contents p h sl pos1))
(Ghost.hide (contents_list p h sl pos2 pos'))
;
true
)
in
()
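(* [list_length] counts the elements of a valid serialized list by folding
   over it with a stack-allocated [U32.t] counter; the invariant [len <= pos1]
   keeps the counter from overflowing. *)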
inline_for_extraction
let list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v res == L.length (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
B.fresh_frame_modifies h0 h1;
let blen : BF.pointer U32.t = BF.alloca 0ul 1ul in
let h2 = HST.get () in
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer blen))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_buffer blen) h2 h /\
B.live h blen /\ (
let len = U32.v (Seq.index (B.as_seq h blen) 0) in
len <= U32.v pos1 /\ // necessary to prove that length computations do not overflow
len == L.length l1
))
(fun h l1 l2 pos1 h' ->
B.modifies_only_not_unused_in (B.loc_buffer blen) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
B.upd blen 0ul (B.index blen 0ul `U32.add` 1ul);
Classical.forall_intro_2 (list_length_append #t)
)
;
let len = B.index blen 0ul in
HST.pop_frame ();
len
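(* [list_filter] copies into [sl_out] exactly those elements of the input list
   that satisfy [f], as decided by the low-level test [f']; the output list
   occupies at most as many bytes as the input list. *)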
#push-options "--z3rlimit 32 --fuel 2 --ifuel 1"
inline_for_extraction
let list_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(f: (t -> Tot bool))
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
(x: Ghost.erased t) ->
HST.Stack bool
(requires (fun h -> valid_content p h sl pos (G.reveal x)))
(ensures (fun h res h' -> B.modifies B.loc_none h h' /\ res == f (G.reveal x)))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
(#rrel_out #rel_out: _)
(sl_out : slice rrel_out rel_out)
(pos_out : U32.t)
: HST.Stack U32.t
(requires (fun h ->
U32.v pos_out + U32.v pos' - U32.v pos <= U32.v sl_out.len /\
valid_list p h sl pos pos' /\
B.loc_disjoint (loc_slice_from_to sl pos pos') (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
live_slice h sl_out
))
(ensures (fun h pos_out' h' ->
B.modifies (loc_slice_from_to sl_out pos_out pos_out') h h' /\
U32.v pos_out' - U32.v pos_out <= U32.v pos' - U32.v pos /\
valid_list p h' sl_out pos_out pos_out' /\
contents_list p h' sl_out pos_out pos_out' == L.filter f (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
//B.fresh_frame_modifies h0 h1;
let bpos_out' : BF.pointer U32.t = BF.alloca pos_out 1ul in
let h2 = HST.get () in
let inv (h: HS.mem) (l1 l2: list t) (pos1: U32.t) : GTot Type0 =
B.live h bpos_out' /\ (
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
valid_list p h sl_out pos_out pos_out' /\
contents_list p h sl_out pos_out pos_out' == L.filter f l1 /\
U32.v pos_out' - U32.v pos1 <= U32.v pos_out - U32.v pos // necessary to prove that length computations do not overflow
)
in
valid_list_nil p h2 sl_out pos_out;
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))))
inv
(fun h l1 l2 pos1 h' ->
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies_only_not_unused_in (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
let pos_out1 = B.index bpos_out' 0ul in
list_filter_append f (G.reveal l1) [G.reveal x];
if f' sl pos1 x
then begin
assert (B.loc_includes (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) (loc_slice_from_to sl_out pos_out1 (pos_out1 `U32.add` (pos2 `U32.sub` pos1))));
let h = HST.get () in
writable_weaken sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (U32.v pos_out1) (U32.v pos_out1 + (U32.v pos2 - U32.v pos1));
let pos_out2 = copy_strong p sl pos1 pos2 sl_out pos_out1 in
B.upd bpos_out' 0ul pos_out2;
let h' = HST.get () in
writable_modifies sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (B.loc_region_only true (HS.get_tip h1)) h';
valid_list_nil p h' sl_out pos_out2;
valid_list_cons p h' sl_out pos_out1 pos_out2;
valid_list_append p h' sl_out pos_out pos_out1 pos_out2
end else
L.append_l_nil (L.filter f (G.reveal l1))
)
;
let pos_out' = B.index bpos_out' 0ul in
HST.pop_frame ();
pos_out'
#pop-options
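(* [list_nth] returns the position of the [i]-th element of a valid serialized
   list: it folds over the list with a counter and interrupts the traversal as
   soon as the counter reaches [i]. *)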
#push-options "--z3rlimit 64 --fuel 2 --ifuel 1"
inline_for_extraction
let list_nth
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(i: U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos' /\
U32.v i < L.length (contents_list p h sl pos pos')
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
valid_list p h sl pos res /\
valid p h sl res /\
valid_list p h sl (get_valid_pos p h sl res) pos' /\
L.length (contents_list p h sl pos res) == U32.v i /\
contents p h sl res == L.index (contents_list p h sl pos pos') (U32.v i)
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos1 = BF.alloca pos 1ul in
let bk = BF.alloca 0ul 1ul in
let h2 = HST.get () in
valid_list_nil p h0 sl pos;
let _ : bool = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
let k = Seq.index (B.as_seq h bk) 0 in
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bpos1 /\
B.live h bk /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v k /\
U32.v k <= U32.v i
)
(fun h _ _ _ h' ->
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bpos1);
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bk);
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
let pos1 = Seq.index (B.as_seq h bpos1) 0 in
B.live h bpos1 /\
valid p h0 sl pos1 /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl (get_valid_pos p h0 sl pos1) pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v i /\
contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i)
)
(fun _ _ ->
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 ->
let k = B.index bk 0ul in
if k = i
then begin
B.upd bpos1 0ul pos1;
valid_list_cons_recip p h0 sl pos1 pos';
list_index_append (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') (U32.v i);
valid_list_append p h0 sl pos pos1 pos' ;
assert (contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i));
false
end else begin
B.upd bk 0ul (k `U32.add` 1ul);
let h = HST.get () in
B.modifies_only_not_unused_in B.loc_none h0 h;
valid_list_snoc p h0 sl pos pos1;
assert (valid p h0 sl pos1);
assert (pos2 == get_valid_pos p h0 sl pos1);
assert (valid_list p h0 sl pos pos2);
list_length_append (contents_list p h0 sl pos pos1) [contents p h0 sl pos1];
true
end
)
in
let res = B.index bpos1 0ul in
HST.pop_frame ();
res
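(* [list_find] returns the position of the first element satisfying [f], or
   [pos'] if no such element exists, mirroring [L.find] on the list of
   contents. *)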
inline_for_extraction
let list_find
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let l = contents_list p h sl pos pos' in
if res = pos'
then L.find f l == None
else
U32.v pos <= U32.v res /\
valid p h sl res /\ (
let x = contents p h sl res in
U32.v res + content_length p h sl res <= U32.v pos' /\
f x == true /\
L.find f l == Some x
)
)))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bres = BF.alloca 0ul 1ul in
let h2 = HST.get () in
let not_found = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\
valid_list p h0 sl pos1 pos' /\
l2 == contents_list p h0 sl pos1 pos' /\
L.find f (contents_list p h0 sl pos pos') == L.find f l2
)
(fun h _ _ _ h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\ (
let res = Seq.index (B.as_seq h bres) 0 in
U32.v pos <= U32.v res /\
valid p h0 sl res /\ (
let x = contents p h0 sl res in
U32.v res + content_length p h0 sl res <= U32.v pos' /\
f x == true /\
L.find f (contents_list p h0 sl pos pos') == Some x
)))
(fun h h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun pos1 pos2 ->
if f' sl pos1
then begin
B.upd bres 0ul pos1;
false
end
else true
)
in
let res =
if not_found
then pos'
else B.index bres 0ul
in
HST.pop_frame ();
res
#pop-options
let rec list_existsb_find
(#a: Type)
(f: (a -> Tot bool))
(l: list a)
: Lemma
(L.existsb f l == Some? (L.find f l))
= match l with
| [] -> ()
| x :: q ->
if f x
then ()
else list_existsb_find f q
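(* [list_existsb] reduces to [list_find]: by [list_existsb_find], an element
   satisfying [f] exists exactly when the search does not return [pos']. *)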
inline_for_extraction
noextract
let list_existsb
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack bool
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == L.existsb f (contents_list p h sl pos pos')
))
= let h = HST.get () in
list_existsb_find f (contents_list p h sl pos pos');
let posn = list_find j f f' sl pos pos' in
posn <> pos'
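(* [list_flatten_map] writes [List.Tot.flatten (List.Tot.map f l)] into [sl2],
   calling [f'] to serialize the image of each input element in turn; it
   returns [max_uint32] when the result does not fit in [sl2]. *)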
#push-options "--fuel 2 --ifuel 1 --z3rlimit 256 --query_stats"
inline_for_extraction
noextract
let list_flatten_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot (list t2))) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2_ res /\
contents_list p2 h' sl2 pos2_ res == y
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1')) in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= let hz = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos2_ = BF.alloca pos2 1ul in
let h2 = HST.get () in
valid_list_nil p2 hz sl2 pos2;
let fits = list_fold_left_gen
p1
j1
sl1
pos1 pos1'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2))
(fun h ll lr _ ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
B.live h bpos2_ /\ (
let pos2_ = Seq.index (B.as_seq h bpos2_) 0 in
contents_list p1 h0 sl1 pos1 pos1' == ll `List.Tot.append` lr /\
valid_list p2 h sl2 pos2 pos2_ /\
contents_list p2 h sl2 pos2 pos2_ == List.Tot.flatten (List.Tot.map f ll) /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(fun h _ _ _ h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
U32.v pos2 + serialized_list_length s2 (List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1'))) > U32.v sl2.len
)
(fun h h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1l pos1r ->
let pos2_ = B.index bpos2_ 0ul in
let h = HST.get () in
writable_weaken sl2.base (U32.v pos2) (U32.v sl2.len) h (U32.v pos2_) (U32.v sl2.len);
valid_pos_frame_strong p1 h0 sl1 pos1l pos1r (loc_slice_from sl2 pos2) hz;
let res = f' pos1l pos2_ in
let fits = not (res = max_uint32) in
if fits then begin
B.upd bpos2_ 0ul res;
let h' = HST.get () in
writable_modifies sl2.base (U32.v pos2) (U32.v sl2.len) h (B.loc_region_only true (HS.get_tip h1)) h' ;
List.Tot.append_assoc (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l] (contents_list p1 h0 sl1 pos1r pos1');
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l];
valid_list_snoc p1 h0 sl1 pos1 pos1l;
valid_list_append p2 h' sl2 pos2 pos2_ res;
valid_list_nil p2 h' sl2 res;
valid_list_append p2 h' sl2 pos2_ res res
end else begin
let h' = HST.get () in
valid_list_cons p1 h0 sl1 pos1l pos1' ;
valid_list_append p1 h0 sl1 pos1 pos1l pos1' ;
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) (contents_list p1 h0 sl1 pos1l pos1');
serialized_list_length_append s2 (L.flatten (L.map f (contents_list p1 h0 sl1 pos1 pos1l))) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1l pos1')));
serialized_list_length_append s2 (f (contents p1 h0 sl1 pos1l)) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1r pos1')));
valid_list_serialized_list_length s2 h' sl2 pos2 pos2_
end;
fits
)
in
let res =
if fits
then B.index bpos2_ 0ul
else max_uint32
in
HST.pop_frame ();
res
#pop-options
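(* [list_map] is the pointwise instance of [list_flatten_map], obtained by
   mapping each element to the singleton list [[f x]]. *)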
#push-options "--z3rlimit 16 --query_stats"
inline_for_extraction
noextract
let list_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot t2)) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_length s2 y > U32.v sl2.len
else
valid_content_pos p2 h' sl2 pos2_ y res
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1') in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= list_map_list_flatten_map f (contents_list p1 h0 sl1 pos1 pos1');
list_flatten_map
j1
s2
(fun x -> [f x])
h0
sl1 pos1 pos1'
sl2 pos2
(fun pos1 pos2 ->
let res = f' pos1 pos2 in
let h = HST.get () in
if res = max_uint32
then begin
serialized_list_length_nil s2;
serialized_list_length_cons s2 (f (contents p1 h0 sl1 pos1)) []
end
else begin
valid_list_nil p2 h sl2 res;
valid_list_cons p2 h sl2 pos2 res
end;
res
)
(* Example: trivial printers *)
inline_for_extraction
let print_list
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(print: ((#rrel: _) -> (#rel: _) -> (sl: slice rrel rel) -> (pos: U32.t) -> HST.Stack unit (requires (fun h -> valid p h sl pos)) (ensures (fun h _ h' -> B.modifies B.loc_none h h'))))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack unit
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h _ h' ->
B.modifies B.loc_none h h'
))
= let h0 = HST.get () in
list_fold_left
p
j
sl
pos pos'
h0
(Ghost.hide B.loc_none)
(fun _ _ _ _ -> True)
(fun _ _ _ _ _ -> ())
(fun pos1 _ _ _ _ ->
print sl pos1
)
(* Monotonicity *)
inline_for_extraction
let compl_t (t: Type) = U32.t -> t -> U32.t -> Tot (B.spred byte)
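(* [wvalid p s compl pos gpos' gv x] states that parsing [x] restricted to
   [pos .. s.len] yields the value [gv] with end position [gpos'], and that
   the user-supplied completion predicate [compl] holds on them. *)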
let wvalid
(#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos' : Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
: GTot prop
=
U32.v pos <= U32.v (Ghost.reveal gpos') /\
U32.v (Ghost.reveal gpos') <= U32.v s.len /\
U32.v s.len <= Seq.length x /\
parse p (Seq.slice x (U32.v pos) (U32.v s.len)) == Some (Ghost.reveal gv, U32.v (Ghost.reveal gpos') - U32.v pos) /\
compl pos (Ghost.reveal gv) (Ghost.reveal gpos') x
let wvalid_valid_content_pos
(#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos' : Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
(h: HS.mem)
: Lemma
(requires (
wvalid p s compl pos gpos' gv x /\
live_slice h s /\
x == B.as_seq h s.base
))
(ensures (
valid_content_pos p h s pos gv gpos'
))
=
valid_facts p h s pos
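(* An [irepr p s compl] packages a position with its ghost end position and
   contents, together with a [B.witnessed] fact that [wvalid] holds of
   [s.base], so that validity can later be recalled without reparsing. *)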
inline_for_extraction
noeq
type irepr (#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel) (compl: compl_t t) =
| IRepr:
(pos: U32.t) ->
(gpos' : Ghost.erased U32.t) ->
(gv: Ghost.erased t) ->
(irepr_correct: squash (
U32.v pos <= U32.v (Ghost.reveal gpos') /\
U32.v (Ghost.reveal gpos') <= U32.v s.len /\
B.witnessed s.base (wvalid p s compl pos gpos' gv)
)) ->
irepr p s compl
inline_for_extraction
let irepr_pos
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : Tot U32.t =
IRepr?.pos x
let irepr_pos'
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : Ghost U32.t
(requires True)
(ensures (fun y -> True))
= Ghost.reveal (IRepr?.gpos' x)
#push-options "--ifuel 1 --fuel 2"
let irepr_pos'_post
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : Lemma
(requires True)
(ensures (
let y = irepr_pos' x in
U32.v (irepr_pos x) <= U32.v y /\ U32.v y <= U32.v s.len
))
[SMTPat (irepr_pos' x)]
= ()
let irepr_v
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : GTot t
= Ghost.reveal (IRepr?.gv x)
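(* [witness_valid_gen] records in the monotonic state of [s.base] that the
   current parse at [pos] satisfies [wvalid]; the caller must show that this
   predicate is stable under the buffer's preorder. *)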
inline_for_extraction
let witness_valid_gen
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
: HST.Stack (irepr p s compl)
(requires (fun h ->
valid p h s pos /\
B.stable_on (wvalid p s compl pos (Ghost.hide (get_valid_pos p h s pos)) (Ghost.hide (contents p h s pos))) (buffer_srel_of_srel rel) /\
compl pos (contents p h s pos) (get_valid_pos p h s pos) (B.as_seq h s.base)
))
(ensures (fun h res h' ->
h' == h /\
irepr_pos res == pos /\
valid_content_pos p h s pos (irepr_v res) (irepr_pos' res)
))
= let h = HST.get () in
[@inline_let]
let gpos' = Ghost.hide (get_valid_pos p h s pos) in
[@inline_let]
let gv = Ghost.hide (contents p h s pos) in
[@inline_let]
let _ = valid_facts p h s pos in
B.witness_p s.base (wvalid p s compl pos gpos' gv);
IRepr pos gpos' gv ()
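(* [recall_valid_gen] recovers validity and the completion predicate for a
   previously witnessed [irepr], provided the underlying buffer is recallable
   or the slice is live. *)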
inline_for_extraction
let recall_valid_gen
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(i: irepr p s compl)
: HST.Stack unit
(requires (fun h -> B.recallable s.base \/ live_slice h s))
(ensures (fun h _ h' ->
h' == h /\
live_slice h s /\
valid_content_pos p h s (irepr_pos i) (irepr_v i) (irepr_pos' i) /\
compl (irepr_pos i) (irepr_v i) (irepr_pos' i) (B.as_seq h s.base) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val recall_valid_gen
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(i: irepr p s compl)
: HST.Stack unit
(requires (fun h -> B.recallable s.base \/ live_slice h s))
(ensures
(fun h _ h' ->
h' == h /\ live_slice h s /\
valid_content_pos p h s (irepr_pos i) (irepr_v i) (irepr_pos' i) /\
compl (irepr_pos i) (irepr_v i) (irepr_pos' i) (B.as_seq h s.base))) | [] | LowParse.Low.Base.recall_valid_gen | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | i: LowParse.Low.Base.irepr p s compl -> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 103,
"end_line": 2135,
"start_col": 1,
"start_line": 2133
} |
FStar.HyperStack.ST.Stack | val validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel #rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h -> B.live h b /\ U32.v len <= B.length b))
(ensures
(fun h res h' ->
B.modifies B.loc_none h h' /\
(let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))))) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL) | val validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel #rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h -> B.live h b /\ U32.v len <= B.length b))
(ensures
(fun h res h' ->
B.modifies B.loc_none h h' /\
(let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul)))))
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel #rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h -> B.live h b /\ U32.v len <= B.length b))
(ensures
(fun h res h' ->
B.modifies B.loc_none h h' /\
(let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))))) = | true | null | false | if is_error (Cast.uint32_to_uint64 len)
then false
else
[@@ inline_let ]let sl = make_slice b len in
is_success (v sl 0uL) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Low.Base.validator",
"LowStar.Monotonic.Buffer.srel",
"LowParse.Bytes.byte",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"LowParse.Low.ErrorCode.is_error",
"FStar.Int.Cast.uint32_to_uint64",
"Prims.bool",
"LowParse.Low.ErrorCode.is_success",
"FStar.UInt64.t",
"LowParse.Slice.srel_of_buffer_srel",
"FStar.UInt64.__uint_to_t",
"LowParse.Slice.slice",
"LowParse.Slice.make_slice",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.length",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.l_iff",
"Prims.eq2",
"LowParse.Low.Base.Spec.valid",
"FStar.UInt32.__uint_to_t"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul)) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel #rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h -> B.live h b /\ U32.v len <= B.length b))
(ensures
(fun h res h' ->
B.modifies B.loc_none h h' /\
(let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))))) | [] | LowParse.Low.Base.validate | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
v: LowParse.Low.Base.validator p ->
b: LowStar.Monotonic.Buffer.mbuffer LowParse.Bytes.byte rrel rel ->
len: FStar.UInt32.t
-> FStar.HyperStack.ST.Stack Prims.bool | {
"end_col": 25,
"end_line": 263,
"start_col": 2,
"start_line": 258
} |
FStar.Pervasives.Lemma | val writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma (requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures
(let s = B.as_seq h b in
s `rel` (Seq.upd s i v) /\ writable b pos pos' (B.g_upd b i v h))) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl') | val writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma (requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures
(let s = B.as_seq h b in
s `rel` (Seq.upd s i v) /\ writable b pos pos' (B.g_upd b i v h)))
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma (requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures
(let s = B.as_seq h b in
s `rel` (Seq.upd s i v) /\ writable b pos pos' (B.g_upd b i v h))) = | false | null | true | let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert ((Seq.upd s i v) `Seq.equal` (Seq.replace_subseq s pos pos' sl')) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.nat",
"FStar.Monotonic.HyperStack.mem",
"Prims._assert",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.upd",
"FStar.Seq.Properties.replace_subseq",
"Prims.unit",
"LowParse.Low.Base.writable_upd_seq",
"FStar.Seq.Base.seq",
"FStar.Seq.Base.slice",
"Prims.op_Subtraction",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.l_and",
"LowParse.Low.Base.writable",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_LessThan",
"LowStar.Monotonic.Buffer.length",
"Prims.squash",
"LowStar.Monotonic.Buffer.g_upd",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma (requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures
(let s = B.as_seq h b in
s `rel` (Seq.upd s i v) /\ writable b pos pos' (B.g_upd b i v h))) | [] | LowParse.Low.Base.writable_upd | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
b: LowStar.Monotonic.Buffer.mbuffer t rrel rel ->
pos: Prims.nat ->
pos': Prims.nat ->
h: FStar.Monotonic.HyperStack.mem ->
i: Prims.nat ->
v: t
-> FStar.Pervasives.Lemma
(requires
LowParse.Low.Base.writable b pos pos' h /\ pos <= i /\ i < pos' /\
pos' <= LowStar.Monotonic.Buffer.length b)
(ensures
(let s = LowStar.Monotonic.Buffer.as_seq h b in
rel s (FStar.Seq.Base.upd s i v) /\
LowParse.Low.Base.writable b pos pos' (LowStar.Monotonic.Buffer.g_upd b i v h))) | {
"end_col": 70,
"end_line": 717,
"start_col": 1,
"start_line": 714
} |
FStar.Pervasives.Lemma | val seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t)
: Lemma (requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures
(s `seq_starts_with` s1 /\ (Seq.slice s (Seq.length s1) (Seq.length s)) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))] | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3)) | val seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t)
: Lemma (requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures
(s `seq_starts_with` s1 /\ (Seq.slice s (Seq.length s1) (Seq.length s)) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t)
: Lemma (requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures
(s `seq_starts_with` s1 /\ (Seq.slice s (Seq.length s1) (Seq.length s)) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))] = | false | null | true | let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` ((s1 `Seq.append` s2) `Seq.append` s3)) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"lemma"
] | [
"FStar.Seq.Base.seq",
"Prims._assert",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.append",
"FStar.Seq.Base.slice",
"Prims.op_Addition",
"FStar.Seq.Base.length",
"Prims.unit",
"LowParse.Low.Base.seq_starts_with",
"Prims.squash",
"Prims.l_and",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
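(* A [validator p] checks whether [p] parses successfully at position [pos]
   of the slice: on success it returns the position one past the parsed
   data, otherwise an error code; in either case memory is left unchanged. *)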
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
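(* A [jumper p] advances over a valid instance of [p]: it returns
   [pos + content_length p h sl pos], i.e. the position just past the parsed
   data, without modifying memory. *)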
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
)) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t)
: Lemma (requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures
(s `seq_starts_with` s1 /\ (Seq.slice s (Seq.length s1) (Seq.length s)) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))] | [] | LowParse.Low.Base.seq_starts_with_append_r_elim | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | s: FStar.Seq.Base.seq t -> s1: FStar.Seq.Base.seq t -> s2: FStar.Seq.Base.seq t
-> FStar.Pervasives.Lemma
(requires LowParse.Low.Base.seq_starts_with s (FStar.Seq.Base.append s1 s2))
(ensures
LowParse.Low.Base.seq_starts_with s s1 /\
LowParse.Low.Base.seq_starts_with (FStar.Seq.Base.slice s
(FStar.Seq.Base.length s1)
(FStar.Seq.Base.length s))
s2)
[SMTPat (LowParse.Low.Base.seq_starts_with s (FStar.Seq.Base.append s1 s2))] | {
"end_col": 61,
"end_line": 457,
"start_col": 1,
"start_line": 456
} |
FStar.Pervasives.Lemma | val writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(h: HS.mem)
(sl': Seq.seq t)
(h': HS.mem)
: Lemma
(requires
(writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos /\
(B.as_seq h' b) `Seq.equal` (Seq.replace_subseq (B.as_seq h b) pos pos' sl') /\
B.live h' b))
(ensures
(let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\ writable b pos pos' h')) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
) | val writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(h: HS.mem)
(sl': Seq.seq t)
(h': HS.mem)
: Lemma
(requires
(writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos /\
(B.as_seq h' b) `Seq.equal` (Seq.replace_subseq (B.as_seq h b) pos pos' sl') /\
B.live h' b))
(ensures
(let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\ writable b pos pos' h'))
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(h: HS.mem)
(sl': Seq.seq t)
(h': HS.mem)
: Lemma
(requires
(writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos /\
(B.as_seq h' b) `Seq.equal` (Seq.replace_subseq (B.as_seq h b) pos pos' sl') /\
B.live h' b))
(ensures
(let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\ writable b pos pos' h')) = | false | null | true | let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` (Seq.replace_subseq s pos pos' sl));
assert (s' `Seq.equal` (Seq.replace_subseq s pos pos' sl'));
writable_intro b
pos
pos'
h'
()
(fun s1 s2 ->
assert ((Seq.replace_subseq s' pos pos' s1) `Seq.equal` (Seq.replace_subseq s pos pos' s1));
assert ((Seq.replace_subseq s' pos pos' s2) `Seq.equal` (Seq.replace_subseq s pos pos' s2))) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.nat",
"FStar.Monotonic.HyperStack.mem",
"FStar.Seq.Base.seq",
"LowParse.Low.Base.writable_intro",
"FStar.Seq.Properties.lseq",
"Prims.op_Subtraction",
"Prims._assert",
"FStar.Seq.Base.equal",
"FStar.Seq.Properties.replace_subseq",
"Prims.unit",
"FStar.Seq.Base.slice",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.l_and",
"LowParse.Low.Base.writable",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"LowStar.Monotonic.Buffer.length",
"Prims.eq2",
"Prims.int",
"FStar.Seq.Base.length",
"LowStar.Monotonic.Buffer.live",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h' | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(h: HS.mem)
(sl': Seq.seq t)
(h': HS.mem)
: Lemma
(requires
(writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos /\
(B.as_seq h' b) `Seq.equal` (Seq.replace_subseq (B.as_seq h b) pos pos' sl') /\
B.live h' b))
(ensures
(let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\ writable b pos pos' h')) | [] | LowParse.Low.Base.writable_replace_subseq | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
b: LowStar.Monotonic.Buffer.mbuffer t rrel rel ->
pos: Prims.nat ->
pos': Prims.nat ->
h: FStar.Monotonic.HyperStack.mem ->
sl': FStar.Seq.Base.seq t ->
h': FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowParse.Low.Base.writable b pos pos' h /\ pos <= pos' /\
pos' <= LowStar.Monotonic.Buffer.length b /\ FStar.Seq.Base.length sl' == pos' - pos /\
FStar.Seq.Base.equal (LowStar.Monotonic.Buffer.as_seq h' b)
(FStar.Seq.Properties.replace_subseq (LowStar.Monotonic.Buffer.as_seq h b) pos pos' sl') /\
LowStar.Monotonic.Buffer.live h' b)
(ensures
(let s = LowStar.Monotonic.Buffer.as_seq h b in
let s' = FStar.Seq.Properties.replace_subseq s pos pos' sl' in
rel s s' /\ LowParse.Low.Base.writable b pos pos' h')) | {
"end_col": 3,
"end_line": 656,
"start_col": 1,
"start_line": 648
} |
FStar.HyperStack.ST.Stack | val copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1)
(spos spos': U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires
(fun h ->
k.parser_kind_subkind == Some ParserStrong /\ valid_pos p h src spos spos' /\
live_slice h dst /\ U32.v dpos <= U32.v dst.len /\ U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)))
(ensures
(fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\
(if dpos' = max_uint32
then U32.v dpos + content_length p h src spos > U32.v dst.len
else valid_content_pos p h' dst dpos (contents p h src spos) dpos'))) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= if (dst.len `U32.sub` dpos) `U32.lt` (spos' `U32.sub` spos)
then max_uint32
else copy_strong p src spos spos' dst dpos | val copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1)
(spos spos': U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires
(fun h ->
k.parser_kind_subkind == Some ParserStrong /\ valid_pos p h src spos spos' /\
live_slice h dst /\ U32.v dpos <= U32.v dst.len /\ U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)))
(ensures
(fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\
(if dpos' = max_uint32
then U32.v dpos + content_length p h src spos > U32.v dst.len
else valid_content_pos p h' dst dpos (contents p h src spos) dpos')))
let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1)
(spos spos': U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires
(fun h ->
k.parser_kind_subkind == Some ParserStrong /\ valid_pos p h src spos spos' /\
live_slice h dst /\ U32.v dpos <= U32.v dst.len /\ U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)))
(ensures
(fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\
(if dpos' = max_uint32
then U32.v dpos + content_length p h src spos > U32.v dst.len
else valid_content_pos p h' dst dpos (contents p h src spos) dpos'))) = | true | null | false | if (dst.len `U32.sub` dpos) `U32.lt` (spos' `U32.sub` spos)
then max_uint32
else copy_strong p src spos spos' dst dpos | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [] | [
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Slice.slice",
"FStar.UInt32.t",
"FStar.UInt32.lt",
"FStar.UInt32.sub",
"LowParse.Slice.__proj__Mkslice__item__len",
"LowParse.Low.ErrorCode.max_uint32",
"Prims.bool",
"LowParse.Low.Base.copy_strong",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"LowParse.Low.Base.Spec.valid_pos",
"LowParse.Slice.live_slice",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.UInt32.v",
"Prims.op_LessThan",
"LowParse.Low.Base.writable",
"LowParse.Slice.buffer_srel_of_srel",
"LowParse.Slice.__proj__Mkslice__item__base",
"Prims.op_Addition",
"Prims.op_Subtraction",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowParse.Slice.loc_slice_from_to",
"LowParse.Slice.loc_slice_from",
"LowStar.Monotonic.Buffer.modifies",
"Prims.op_Equality",
"Prims.op_GreaterThan",
"LowParse.Low.Base.Spec.content_length",
"LowParse.Low.Base.Spec.valid_content_pos",
"LowParse.Low.Base.Spec.contents",
"Prims.logical"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
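(* For a total parser of known constant size, validation reduces to a bounds
   check: no byte of the input needs to be read, which is why the validator is
   built from the [_no_read] variant above. For example (with a hypothetical
   total, 4-byte constant-size parser [p]), [validate_total_constant_size p 4uL ()]
   only compares the remaining length against 4. *)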
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
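(* A [jumper p] is the "cannot fail" counterpart of a validator: it may assume
   that the data at [pos] is already valid for [p] and merely computes the
   position one past the parsed value, hence the 32-bit result. *)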
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
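(* A [leaf_reader p] goes one step further than a jumper: under the same
   validity hypothesis it returns the parsed value itself,
   [contents p h sl pos], as an F* value of type [t]. *)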
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
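(* [writable b pos pos' h] states that the preorder of [b] puts no constraint
   on the bytes in [pos, pos'): any two ways of replacing that subsequence are
   related by [rel]. This is the precondition that lets serializers overwrite
   the region regardless of its current contents. *)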
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
  B.g_upd_seq_as_seq b s' h; // needed to establish that b is still live in B.g_upd_seq b s' h
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
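(* The two writer types above differ only in how they handle lack of space:
   a [leaf_writer_weak] may fail at run time and then returns [max_uint32],
   whereas a [leaf_writer_strong] requires the caller to prove beforehand that
   [serialized_length s x] bytes fit at [pos]. *)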
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
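(* A [serializer32 s] writes the byte representation [serialize s x] into [b]
   at [pos] and returns the number of bytes written; the caller must have
   established [writable] on the target range, and only that range is
   modified. *)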
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
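(* [frame_serializer32] wraps a [serializer32] with framing information for an
   enclosing interval [posl, posr): besides the bytes actually written, the
   postcondition records that the prefix [posl, pos) and the suffix
   [pos+len, posr) are unchanged, and that writability of the whole interval
   is preserved. This is what a caller needs when serializing one field in the
   middle of a larger output region. *)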
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h'
#push-options "--z3rlimit 16"
inline_for_extraction
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos
))
= let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h0 (Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@inline_let] let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos'
#pop-options
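(* [copy_strong] transports a valid representation from [src] to [dst] by
   copying its bytes with [blit_strong]; validity at the destination then
   follows from [parse_strong_prefix], which is why the strong subkind is
   required. *)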
inline_for_extraction
let copy_strong'
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\ (
let clen = content_length p h src spos in
U32.v dpos + clen <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + clen) h /\
B.loc_disjoint (loc_slice_from src spos) (loc_slice_from_to dst dpos (dpos `U32.add` (U32.uint_to_t clen)))
)))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
))
= let spos' = j src spos in
copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos' | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1)
(spos spos': U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires
(fun h ->
k.parser_kind_subkind == Some ParserStrong /\ valid_pos p h src spos spos' /\
live_slice h dst /\ U32.v dpos <= U32.v dst.len /\ U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)))
(ensures
(fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\
(if dpos' = max_uint32
then U32.v dpos + content_length p h src spos > U32.v dst.len
else valid_content_pos p h' dst dpos (contents p h src spos) dpos'))) | [] | LowParse.Low.Base.copy_weak_with_length | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
p: LowParse.Spec.Base.parser k t ->
src: LowParse.Slice.slice rrel1 rel1 ->
spos: FStar.UInt32.t ->
spos': FStar.UInt32.t ->
dst: LowParse.Slice.slice rrel2 rel2 ->
dpos: FStar.UInt32.t
-> FStar.HyperStack.ST.Stack FStar.UInt32.t | {
"end_col": 44,
"end_line": 1108,
"start_col": 2,
"start_line": 1106
} |
FStar.HyperStack.ST.Stack | val frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel #rel: _)
(b: B.mbuffer byte rrel rel)
(posl posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires
(fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\ U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\ U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h))
(ensures
(fun h len h' ->
Seq.length (serialize s x) == U32.v len /\
(B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
(Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len))
`Seq.equal`
(serialize s x) /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
(Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos))
`Seq.equal`
(Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos)) /\
(Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)))
`Seq.equal`
(Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)))))) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res | val frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel #rel: _)
(b: B.mbuffer byte rrel rel)
(posl posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires
(fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\ U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\ U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h))
(ensures
(fun h len h' ->
Seq.length (serialize s x) == U32.v len /\
(B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
(Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len))
`Seq.equal`
(serialize s x) /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
(Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos))
`Seq.equal`
(Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos)) /\
(Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)))
`Seq.equal`
(Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))))))
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel #rel: _)
(b: B.mbuffer byte rrel rel)
(posl posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires
(fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\ U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\ U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h))
(ensures
(fun h len h' ->
Seq.length (serialize s x) == U32.v len /\
(B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
(Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len))
`Seq.equal`
(serialize s x) /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
(Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos))
`Seq.equal`
(Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos)) /\
(Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)))
`Seq.equal`
(Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)))))) = | true | null | false | let h0 = HST.get () in
writable_weaken b
(U32.v (Ghost.reveal posl))
(U32.v (Ghost.reveal posr))
h0
(U32.v pos)
(U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"LowParse.Low.Base.serializer32",
"LowStar.Monotonic.Buffer.srel",
"LowParse.Bytes.byte",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Ghost.erased",
"FStar.UInt32.t",
"Prims.unit",
"LowStar.Monotonic.Buffer.modifies_buffer_from_to_elim",
"FStar.Ghost.reveal",
"LowStar.Monotonic.Buffer.loc_buffer_from_to",
"LowStar.Monotonic.Buffer.loc_disjoint_loc_buffer_from_to",
"LowStar.Monotonic.Buffer.loc_includes_loc_buffer_from_to",
"LowParse.Low.Base.writable_modifies",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.loc_none",
"FStar.UInt32.add",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowParse.Low.Base.writable_weaken",
"Prims.op_Addition",
"FStar.Seq.Base.length",
"LowParse.Spec.Base.serialize",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"LowStar.Monotonic.Buffer.length",
"LowParse.Low.Base.writable",
"FStar.Seq.Base.seq",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.nat",
"Prims.eq2",
"Prims.int",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt.size",
"FStar.UInt32.n",
"LowStar.Monotonic.Buffer.modifies",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.slice"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
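(* An [accessor g] is the executable counterpart of the ghost [gaccessor g]:
   given a valid representation of [t1] whose lens condition holds, it returns
   the position of the corresponding [t2] sub-representation without modifying
   the input. *)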
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
  (sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong)
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
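(* [accessor_compose] chains two accessors by running them in sequence; the
   calls to [slice_access_eq] and [gaccessor_compose_eq] reconcile the
   resulting position with the specification of the composed ghost
   accessor. *)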
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
  B.g_upd_seq_as_seq b s' h; // needed to establish that b is still live in B.g_upd_seq b s' h
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel #rel: _)
(b: B.mbuffer byte rrel rel)
(posl posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires
(fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\ U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\ U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h))
(ensures
(fun h len h' ->
Seq.length (serialize s x) == U32.v len /\
(B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
(Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len))
`Seq.equal`
(serialize s x) /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
(Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos))
`Seq.equal`
(Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos)) /\
(Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)))
`Seq.equal`
(Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)))))) | [] | LowParse.Low.Base.frame_serializer32 | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
s32: LowParse.Low.Base.serializer32 s ->
x: t ->
b: LowStar.Monotonic.Buffer.mbuffer LowParse.Bytes.byte rrel rel ->
posl: FStar.Ghost.erased FStar.UInt32.t ->
posr: FStar.Ghost.erased FStar.UInt32.t ->
pos: FStar.UInt32.t
-> FStar.HyperStack.ST.Stack FStar.UInt32.t | {
"end_col": 5,
"end_line": 907,
"start_col": 1,
"start_line": 893
} |
FStar.HyperStack.ST.Stack | val witness_valid_gen
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
: HST.Stack (irepr p s compl)
(requires
(fun h ->
valid p h s pos /\
B.stable_on (wvalid p
s
compl
pos
(Ghost.hide (get_valid_pos p h s pos))
(Ghost.hide (contents p h s pos)))
(buffer_srel_of_srel rel) /\
compl pos (contents p h s pos) (get_valid_pos p h s pos) (B.as_seq h s.base)))
(ensures
(fun h res h' ->
h' == h /\ irepr_pos res == pos /\
valid_content_pos p h s pos (irepr_v res) (irepr_pos' res))) | [
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "BF"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let witness_valid_gen
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
: HST.Stack (irepr p s compl)
(requires (fun h ->
valid p h s pos /\
B.stable_on (wvalid p s compl pos (Ghost.hide (get_valid_pos p h s pos)) (Ghost.hide (contents p h s pos))) (buffer_srel_of_srel rel) /\
compl pos (contents p h s pos) (get_valid_pos p h s pos) (B.as_seq h s.base)
))
(ensures (fun h res h' ->
h' == h /\
irepr_pos res == pos /\
valid_content_pos p h s pos (irepr_v res) (irepr_pos' res)
))
= let h = HST.get () in
[@inline_let]
let gpos' = Ghost.hide (get_valid_pos p h s pos) in
[@inline_let]
let gv = Ghost.hide (contents p h s pos) in
[@inline_let]
let _ = valid_facts p h s pos in
B.witness_p s.base (wvalid p s compl pos gpos' gv);
IRepr pos gpos' gv () | val witness_valid_gen
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
: HST.Stack (irepr p s compl)
(requires
(fun h ->
valid p h s pos /\
B.stable_on (wvalid p
s
compl
pos
(Ghost.hide (get_valid_pos p h s pos))
(Ghost.hide (contents p h s pos)))
(buffer_srel_of_srel rel) /\
compl pos (contents p h s pos) (get_valid_pos p h s pos) (B.as_seq h s.base)))
(ensures
(fun h res h' ->
h' == h /\ irepr_pos res == pos /\
valid_content_pos p h s pos (irepr_v res) (irepr_pos' res)))
let witness_valid_gen
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
: HST.Stack (irepr p s compl)
(requires
(fun h ->
valid p h s pos /\
B.stable_on (wvalid p
s
compl
pos
(Ghost.hide (get_valid_pos p h s pos))
(Ghost.hide (contents p h s pos)))
(buffer_srel_of_srel rel) /\
compl pos (contents p h s pos) (get_valid_pos p h s pos) (B.as_seq h s.base)))
(ensures
(fun h res h' ->
h' == h /\ irepr_pos res == pos /\
valid_content_pos p h s pos (irepr_v res) (irepr_pos' res))) = | true | null | false | let h = HST.get () in
[@@ inline_let ]let gpos' = Ghost.hide (get_valid_pos p h s pos) in
[@@ inline_let ]let gv = Ghost.hide (contents p h s pos) in
[@@ inline_let ]let _ = valid_facts p h s pos in
B.witness_p s.base (wvalid p s compl pos gpos' gv);
IRepr pos gpos' gv () | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"LowParse.Low.Base.compl_t",
"FStar.UInt32.t",
"LowParse.Low.Base.IRepr",
"LowParse.Low.Base.irepr",
"Prims.unit",
"LowStar.Monotonic.Buffer.witness_p",
"LowParse.Slice.buffer_srel_of_srel",
"LowParse.Slice.__proj__Mkslice__item__base",
"LowParse.Low.Base.wvalid",
"LowParse.Low.Base.Spec.valid_facts",
"FStar.Ghost.erased",
"FStar.Ghost.hide",
"LowParse.Low.Base.Spec.contents",
"LowParse.Low.Base.Spec.get_valid_pos",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"Prims.l_and",
"LowParse.Low.Base.Spec.valid",
"LowStar.Monotonic.Buffer.stable_on",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.eq2",
"LowParse.Low.Base.irepr_pos",
"LowParse.Low.Base.Spec.valid_content_pos",
"LowParse.Low.Base.irepr_v",
"LowParse.Low.Base.irepr_pos'"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
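(* Intuition: [writable b pos pos' h] says that the buffer's preorder [rel]
   places no constraint on the sub-range [pos, pos'): any two replacements of
   that range yield related sequences, so the range may be overwritten freely.
   For a buffer with the trivial preorder this holds whenever the buffer is
   live. *)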
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
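(* A minimal sketch of how [writable_intro] can be discharged for a buffer
   whose preorder is trivial (illustration only, not part of the original
   file; [LowStar.Buffer.buffer] is the trivial-preorder instance):

     let writable_trivial (#t: Type) (b: LowStar.Buffer.buffer t)
       (pos pos' : nat) (h: HS.mem)
     : Lemma
       (requires (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
       (ensures (writable b pos pos' h))
     = writable_intro b pos pos' h () (fun _ _ -> ())

   since any two replacements of the sub-range are related by the trivial
   preorder. *)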
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
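(* [mbuffer_upd] packages [B.upd'] together with the writability lemmas above,
   so the caller gets a modifies clause confined to the single written cell
   ([loc_buffer_from_to b i (i + 1)]) and learns that the enclosing
   [pos, pos') range remains writable for subsequent writes. *)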
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
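(* The lemma calls above frame the single write performed by [s32]: the
   modifies clause is enlarged to the ghost range [posl, posr), and the
   [modifies_buffer_from_to_elim] steps recover that the bytes before [pos]
   and after [pos + len] within that range are unchanged, which is exactly
   what the postcondition of [frame_serializer32] states. *)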
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h'
#push-options "--z3rlimit 16"
inline_for_extraction
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos
))
= let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h0 (Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@inline_let] let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos'
#pop-options
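(* [copy_strong] copies the valid representation of a value from [src] at
   [spos, spos') to [dst] at [dpos] using [blit_strong], then re-validates it
   at the destination via [parse_strong_prefix]; the two ranges must be
   disjoint and the destination range writable. Illustrative call shape (not
   part of the original file):

     let dpos' = copy_strong p src spos spos' dst dpos in
     // contents p h' dst dpos == contents p h src spos
*)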
inline_for_extraction
let copy_strong'
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\ (
let clen = content_length p h src spos in
U32.v dpos + clen <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + clen) h /\
B.loc_disjoint (loc_slice_from src spos) (loc_slice_from_to dst dpos (dpos `U32.add` (U32.uint_to_t clen)))
)))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
))
= let spos' = j src spos in
copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= if (dst.len `U32.sub` dpos) `U32.lt` (spos' `U32.sub` spos)
then max_uint32
else copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(jmp: jumper p)
(src: slice rrel1 rel1)
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (content_length p h src spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos (get_valid_pos p h src spos)) (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= let spos' = jmp src spos in
copy_weak_with_length p src spos spos' dst dpos
(* fold_left on lists *)
module BF = LowStar.Buffer
#push-options "--z3rlimit 256 --fuel 1 --ifuel 1"
#restart-solver
inline_for_extraction
let list_fold_left_gen
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(post_interrupt: ((h: HS.mem) -> GTot Type0))
(post_interrupt_frame: (h: HS.mem) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
post_interrupt h
)) (ensures (post_interrupt h')))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
HST.Stack bool
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos1 /\
valid_pos p h0 sl pos1 pos2 /\
valid_list p h0 sl pos2 pos' /\
inv h (contents_list p h0 sl pos pos1) (contents p h0 sl pos1 :: contents_list p h0 sl pos2 pos') pos1
))
(ensures (fun h ctinue h' ->
B.modifies (Ghost.reveal l) h h' /\
(if ctinue then inv h' (contents_list p h0 sl pos pos1 `L.append` [contents p h0 sl pos1]) (contents_list p h0 sl pos2 pos') pos2 else post_interrupt h')
))
))
: HST.Stack bool
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h res h' ->
B.modifies (Ghost.reveal l) h h' /\
(if res then inv h' (contents_list p h sl pos pos') [] pos' else post_interrupt h')
))
= HST.push_frame ();
let h1 = HST.get () in
// B.fresh_frame_modifies h0 h1;
let bpos : BF.pointer U32.t = BF.alloca pos 1ul in
let bctinue : BF.pointer bool = BF.alloca true 1ul in
let btest: BF.pointer bool = BF.alloca (pos `U32.lt` pos') 1ul in
let h2 = HST.get () in
assert (B.modifies B.loc_none h0 h2);
let test_pre (h: HS.mem) : GTot Type0 =
B.live h bpos /\ B.live h bctinue /\ B.live h btest /\ (
let pos1 = Seq.index (B.as_seq h bpos) 0 in
let ctinue = Seq.index (B.as_seq h bctinue) 0 in
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
B.modifies (Ghost.reveal l `B.loc_union` B.loc_region_only true (HS.get_tip h1)) h2 h /\
Seq.index (B.as_seq h btest) 0 == ((U32.v (Seq.index (B.as_seq h bpos) 0) < U32.v pos') && Seq.index (B.as_seq h bctinue) 0) /\
(if ctinue then inv h (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 else post_interrupt h)
)
in
let test_post (cond: bool) (h: HS.mem) : GTot Type0 =
test_pre h /\
cond == Seq.index (B.as_seq h btest) 0
in
valid_list_nil p h0 sl pos;
inv_frame h0 [] (contents_list p h0 sl pos pos') pos h1;
inv_frame h1 [] (contents_list p h0 sl pos pos') pos h2;
[@inline_let]
let while_body () : HST.Stack unit
(requires (fun h -> test_post true h))
(ensures (fun _ _ h1 -> test_pre h1))
=
let h51 = HST.get () in
let pos1 = B.index bpos 0ul in
valid_list_cons_recip p h0 sl pos1 pos';
//assert (B.modifies (Ghost.reveal l `B.loc_union` B.loc_buffer bpos) h0 h51);
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos'));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos pos1));
valid_list_cons_recip p h51 sl pos1 pos';
let pos2 = j sl pos1 in
let h52 = HST.get () in
inv_frame h51 (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 h52;
B.modifies_only_not_unused_in (Ghost.reveal l) h0 h52;
let ctinue = body pos1 pos2 in
let h53 = HST.get () in
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos2 pos'));
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h1));
valid_pos_frame_strong p h0 sl pos1 pos2 (Ghost.reveal l) h53;
valid_list_snoc p h0 sl pos pos1;
B.upd bpos 0ul pos2;
B.upd bctinue 0ul ctinue;
B.upd btest 0ul ((pos2 `U32.lt` pos') && ctinue);
let h54 = HST.get () in
[@inline_let]
let _ =
if ctinue
then inv_frame h53 (contents_list p h0 sl pos pos2) (contents_list p h0 sl pos2 pos') pos2 h54
else post_interrupt_frame h53 h54
in
()
in
C.Loops.while
#test_pre
#test_post
(fun (_: unit) -> (
B.index btest 0ul) <: HST.Stack bool (requires (fun h -> test_pre h)) (ensures (fun h x h1 -> test_post x h1)))
while_body
;
valid_list_nil p h0 sl pos';
let res = B.index bctinue 0ul in
let h3 = HST.get () in
HST.pop_frame ();
let h4 = HST.get () in
//B.popped_modifies h3 h4;
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h3));
[@inline_let]
let _ =
if res
then inv_frame h3 (contents_list p h0 sl pos pos') [] pos' h4
else post_interrupt_frame h3 h4
in
res
#pop-options
//B.loc_includes_union_l (B.loc_all_regions_from false (HS.get_tip h1)) (Ghost.reveal l) (Ghost.reveal l)
//B.modifies_fresh_frame_popped h0 h1 (Ghost.reveal l) h3 h4
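(* [list_fold_left_gen] is the workhorse for iterating in place over a valid
   list of parsed elements: it maintains a caller-supplied invariant over the
   already-visited prefix and the remaining suffix, and the [bool] returned by
   [body] allows early interruption, in which case [post_interrupt] holds
   instead of the final invariant. The specialized folds below
   ([list_fold_left], [list_length], [list_filter], [list_nth], [list_find])
   are all instances of this pattern. *)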
module G = FStar.Ghost
inline_for_extraction
let list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
(l1: Ghost.erased (list t)) ->
(x: Ghost.erased t) ->
(l2: Ghost.erased (list t)) ->
HST.Stack unit
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\
U32.v pos <= U32.v pos1 /\ U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' == Ghost.reveal l1 `L.append` (Ghost.reveal x :: Ghost.reveal l2)
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (Ghost.reveal l1 `L.append` [contents p h0 sl pos1]) (Ghost.reveal l2) pos2
))
))
: HST.Stack unit
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (contents_list p h sl pos pos') [] pos'
))
= let _ = list_fold_left_gen
p
j
sl
pos pos'
h0
l
inv
inv_frame
(fun _ -> False)
(fun _ _ -> ())
(fun pos1 pos2 ->
let h = HST.get () in
valid_list_cons p h sl pos1 pos';
valid_list_append p h sl pos pos1 pos';
body
pos1
pos2
(Ghost.hide (contents_list p h sl pos pos1))
(Ghost.hide (contents p h sl pos1))
(Ghost.hide (contents_list p h sl pos2 pos'))
;
true
)
in
()
inline_for_extraction
let list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v res == L.length (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
B.fresh_frame_modifies h0 h1;
let blen : BF.pointer U32.t = BF.alloca 0ul 1ul in
let h2 = HST.get () in
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer blen))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_buffer blen) h2 h /\
B.live h blen /\ (
let len = U32.v (Seq.index (B.as_seq h blen) 0) in
len <= U32.v pos1 /\ // necessary to prove that length computations do not overflow
len == L.length l1
))
(fun h l1 l2 pos1 h' ->
B.modifies_only_not_unused_in (B.loc_buffer blen) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
B.upd blen 0ul (B.index blen 0ul `U32.add` 1ul);
Classical.forall_intro_2 (list_length_append #t)
)
;
let len = B.index blen 0ul in
HST.pop_frame ();
len
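(* [list_length] instantiates [list_fold_left] with a single stack-allocated
   [U32.t] counter; the invariant [len <= U32.v pos1] ties the count to the
   current position in the slice, which is what rules out overflow of the
   32-bit counter. *)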
#push-options "--z3rlimit 32 --fuel 2 --ifuel 1"
inline_for_extraction
let list_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(f: (t -> Tot bool))
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
(x: Ghost.erased t) ->
HST.Stack bool
(requires (fun h -> valid_content p h sl pos (G.reveal x)))
(ensures (fun h res h' -> B.modifies B.loc_none h h' /\ res == f (G.reveal x)))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
(#rrel_out #rel_out: _)
(sl_out : slice rrel_out rel_out)
(pos_out : U32.t)
: HST.Stack U32.t
(requires (fun h ->
U32.v pos_out + U32.v pos' - U32.v pos <= U32.v sl_out.len /\
valid_list p h sl pos pos' /\
B.loc_disjoint (loc_slice_from_to sl pos pos') (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
live_slice h sl_out
))
(ensures (fun h pos_out' h' ->
B.modifies (loc_slice_from_to sl_out pos_out pos_out') h h' /\
U32.v pos_out' - U32.v pos_out <= U32.v pos' - U32.v pos /\
valid_list p h' sl_out pos_out pos_out' /\
contents_list p h' sl_out pos_out pos_out' == L.filter f (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
//B.fresh_frame_modifies h0 h1;
let bpos_out' : BF.pointer U32.t = BF.alloca pos_out 1ul in
let h2 = HST.get () in
let inv (h: HS.mem) (l1 l2: list t) (pos1: U32.t) : GTot Type0 =
B.live h bpos_out' /\ (
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
valid_list p h sl_out pos_out pos_out' /\
contents_list p h sl_out pos_out pos_out' == L.filter f l1 /\
U32.v pos_out' - U32.v pos1 <= U32.v pos_out - U32.v pos // necessary to prove that length computations do not overflow
)
in
valid_list_nil p h2 sl_out pos_out;
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))))
inv
(fun h l1 l2 pos1 h' ->
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies_only_not_unused_in (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
let pos_out1 = B.index bpos_out' 0ul in
list_filter_append f (G.reveal l1) [G.reveal x];
if f' sl pos1 x
then begin
assert (B.loc_includes (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) (loc_slice_from_to sl_out pos_out1 (pos_out1 `U32.add` (pos2 `U32.sub` pos1))));
let h = HST.get () in
writable_weaken sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (U32.v pos_out1) (U32.v pos_out1 + (U32.v pos2 - U32.v pos1));
let pos_out2 = copy_strong p sl pos1 pos2 sl_out pos_out1 in
B.upd bpos_out' 0ul pos_out2;
let h' = HST.get () in
writable_modifies sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (B.loc_region_only true (HS.get_tip h1)) h';
valid_list_nil p h' sl_out pos_out2;
valid_list_cons p h' sl_out pos_out1 pos_out2;
valid_list_append p h' sl_out pos_out pos_out1 pos_out2
end else
L.append_l_nil (L.filter f (G.reveal l1))
)
;
let pos_out' = B.index bpos_out' 0ul in
HST.pop_frame ();
pos_out'
#pop-options
#push-options "--z3rlimit 64 --fuel 2 --ifuel 1"
inline_for_extraction
let list_nth
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(i: U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos' /\
U32.v i < L.length (contents_list p h sl pos pos')
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
valid_list p h sl pos res /\
valid p h sl res /\
valid_list p h sl (get_valid_pos p h sl res) pos' /\
L.length (contents_list p h sl pos res) == U32.v i /\
contents p h sl res == L.index (contents_list p h sl pos pos') (U32.v i)
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos1 = BF.alloca pos 1ul in
let bk = BF.alloca 0ul 1ul in
let h2 = HST.get () in
valid_list_nil p h0 sl pos;
let _ : bool = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
let k = Seq.index (B.as_seq h bk) 0 in
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bpos1 /\
B.live h bk /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v k /\
U32.v k <= U32.v i
)
(fun h _ _ _ h' ->
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bpos1);
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bk);
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
let pos1 = Seq.index (B.as_seq h bpos1) 0 in
B.live h bpos1 /\
valid p h0 sl pos1 /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl (get_valid_pos p h0 sl pos1) pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v i /\
contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i)
)
(fun _ _ ->
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 ->
let k = B.index bk 0ul in
if k = i
then begin
B.upd bpos1 0ul pos1;
valid_list_cons_recip p h0 sl pos1 pos';
list_index_append (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') (U32.v i);
valid_list_append p h0 sl pos pos1 pos' ;
assert (contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i));
false
end else begin
B.upd bk 0ul (k `U32.add` 1ul);
let h = HST.get () in
B.modifies_only_not_unused_in B.loc_none h0 h;
valid_list_snoc p h0 sl pos pos1;
assert (valid p h0 sl pos1);
assert (pos2 == get_valid_pos p h0 sl pos1);
assert (valid_list p h0 sl pos pos2);
list_length_append (contents_list p h0 sl pos pos1) [contents p h0 sl pos1];
true
end
)
in
let res = B.index bpos1 0ul in
HST.pop_frame ();
res
inline_for_extraction
let list_find
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let l = contents_list p h sl pos pos' in
if res = pos'
then L.find f l == None
else
U32.v pos <= U32.v res /\
valid p h sl res /\ (
let x = contents p h sl res in
U32.v res + content_length p h sl res <= U32.v pos' /\
f x == true /\
L.find f l == Some x
)
)))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bres = BF.alloca 0ul 1ul in
let h2 = HST.get () in
let not_found = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\
valid_list p h0 sl pos1 pos' /\
l2 == contents_list p h0 sl pos1 pos' /\
L.find f (contents_list p h0 sl pos pos') == L.find f l2
)
(fun h _ _ _ h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\ (
let res = Seq.index (B.as_seq h bres) 0 in
U32.v pos <= U32.v res /\
valid p h0 sl res /\ (
let x = contents p h0 sl res in
U32.v res + content_length p h0 sl res <= U32.v pos' /\
f x == true /\
L.find f (contents_list p h0 sl pos pos') == Some x
)))
(fun h h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun pos1 pos2 ->
if f' sl pos1
then begin
B.upd bres 0ul pos1;
false
end
else true
)
in
let res =
if not_found
then pos'
else B.index bres 0ul
in
HST.pop_frame ();
res
#pop-options
let rec list_existsb_find
(#a: Type)
(f: (a -> Tot bool))
(l: list a)
: Lemma
(L.existsb f l == Some? (L.find f l))
= match l with
| [] -> ()
| x :: q ->
if f x
then ()
else list_existsb_find f q
inline_for_extraction
noextract
let list_existsb
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack bool
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == L.existsb f (contents_list p h sl pos pos')
))
= let h = HST.get () in
list_existsb_find f (contents_list p h sl pos pos');
let posn = list_find j f f' sl pos pos' in
posn <> pos'
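(* [list_existsb] reduces existence to [list_find]: by [list_existsb_find],
   [L.existsb f l] holds exactly when [L.find f l] is [Some], i.e. exactly
   when the positional search does not return [pos']. *)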
#push-options "--fuel 2 --ifuel 1 --z3rlimit 256 --query_stats"
inline_for_extraction
noextract
let list_flatten_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot (list t2))) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2_ res /\
contents_list p2 h' sl2 pos2_ res == y
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1')) in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= let hz = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos2_ = BF.alloca pos2 1ul in
let h2 = HST.get () in
valid_list_nil p2 hz sl2 pos2;
let fits = list_fold_left_gen
p1
j1
sl1
pos1 pos1'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2))
(fun h ll lr _ ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
B.live h bpos2_ /\ (
let pos2_ = Seq.index (B.as_seq h bpos2_) 0 in
contents_list p1 h0 sl1 pos1 pos1' == ll `List.Tot.append` lr /\
valid_list p2 h sl2 pos2 pos2_ /\
contents_list p2 h sl2 pos2 pos2_ == List.Tot.flatten (List.Tot.map f ll) /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(fun h _ _ _ h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
U32.v pos2 + serialized_list_length s2 (List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1'))) > U32.v sl2.len
)
(fun h h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1l pos1r ->
let pos2_ = B.index bpos2_ 0ul in
let h = HST.get () in
writable_weaken sl2.base (U32.v pos2) (U32.v sl2.len) h (U32.v pos2_) (U32.v sl2.len);
valid_pos_frame_strong p1 h0 sl1 pos1l pos1r (loc_slice_from sl2 pos2) hz;
let res = f' pos1l pos2_ in
let fits = not (res = max_uint32) in
if fits then begin
B.upd bpos2_ 0ul res;
let h' = HST.get () in
writable_modifies sl2.base (U32.v pos2) (U32.v sl2.len) h (B.loc_region_only true (HS.get_tip h1)) h' ;
List.Tot.append_assoc (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l] (contents_list p1 h0 sl1 pos1r pos1');
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l];
valid_list_snoc p1 h0 sl1 pos1 pos1l;
valid_list_append p2 h' sl2 pos2 pos2_ res;
valid_list_nil p2 h' sl2 res;
valid_list_append p2 h' sl2 pos2_ res res
end else begin
let h' = HST.get () in
valid_list_cons p1 h0 sl1 pos1l pos1' ;
valid_list_append p1 h0 sl1 pos1 pos1l pos1' ;
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) (contents_list p1 h0 sl1 pos1l pos1');
serialized_list_length_append s2 (L.flatten (L.map f (contents_list p1 h0 sl1 pos1 pos1l))) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1l pos1')));
serialized_list_length_append s2 (f (contents p1 h0 sl1 pos1l)) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1r pos1')));
valid_list_serialized_list_length s2 h' sl2 pos2 pos2_
end;
fits
)
in
let res =
if fits
then B.index bpos2_ 0ul
else max_uint32
in
HST.pop_frame ();
res
#pop-options
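(* list_map is the special case of list_flatten_map where each input
   element produces exactly one output element: it instantiates the
   mapped function with fun x -> [f x] and rewrites the specification
   with list_map_list_flatten_map. *)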
#push-options "--z3rlimit 16 --query_stats"
inline_for_extraction
noextract
let list_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot t2)) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_length s2 y > U32.v sl2.len
else
valid_content_pos p2 h' sl2 pos2_ y res
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1') in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= list_map_list_flatten_map f (contents_list p1 h0 sl1 pos1 pos1');
list_flatten_map
j1
s2
(fun x -> [f x])
h0
sl1 pos1 pos1'
sl2 pos2
(fun pos1 pos2 ->
let res = f' pos1 pos2 in
let h = HST.get () in
if res = max_uint32
then begin
serialized_list_length_nil s2;
serialized_list_length_cons s2 (f (contents p1 h0 sl1 pos1)) []
end
else begin
valid_list_nil p2 h sl2 res;
valid_list_cons p2 h sl2 pos2 res
end;
res
)
(* Example: trivial printers *)
inline_for_extraction
let print_list
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(print: ((#rrel: _) -> (#rel: _) -> (sl: slice rrel rel) -> (pos: U32.t) -> HST.Stack unit (requires (fun h -> valid p h sl pos)) (ensures (fun h _ h' -> B.modifies B.loc_none h h'))))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack unit
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h _ h' ->
B.modifies B.loc_none h h'
))
= let h0 = HST.get () in
list_fold_left
p
j
sl
pos pos'
h0
(Ghost.hide B.loc_none)
(fun _ _ _ _ -> True)
(fun _ _ _ _ _ -> ())
(fun pos1 _ _ _ _ ->
print sl pos1
)
(* Monotonicity *)
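(* A compl_t t is a caller-chosen predicate on the underlying byte
   sequence, indexed by the start position, the parsed value and the end
   position. wvalid packages "p parses successfully at pos, ending at
   gpos' with value gv" together with that predicate as a single
   predicate on the buffer contents, so that it can be witnessed with
   B.witnessed and recalled later without re-running the parser. *)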
inline_for_extraction
let compl_t (t: Type) = U32.t -> t -> U32.t -> Tot (B.spred byte)
let wvalid
(#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos' : Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
: GTot prop
=
U32.v pos <= U32.v (Ghost.reveal gpos') /\
U32.v (Ghost.reveal gpos') <= U32.v s.len /\
U32.v s.len <= Seq.length x /\
parse p (Seq.slice x (U32.v pos) (U32.v s.len)) == Some (Ghost.reveal gv, U32.v (Ghost.reveal gpos') - U32.v pos) /\
compl pos (Ghost.reveal gv) (Ghost.reveal gpos') x
let wvalid_valid_content_pos
(#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos' : Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
(h: HS.mem)
: Lemma
(requires (
wvalid p s compl pos gpos' gv x /\
live_slice h s /\
x == B.as_seq h s.base
))
(ensures (
valid_content_pos p h s pos gv gpos'
))
=
valid_facts p h s pos
inline_for_extraction
noeq
type irepr (#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel) (compl: compl_t t) =
| IRepr:
(pos: U32.t) ->
(gpos' : Ghost.erased U32.t) ->
(gv: Ghost.erased t) ->
(irepr_correct: squash (
U32.v pos <= U32.v (Ghost.reveal gpos') /\
U32.v (Ghost.reveal gpos') <= U32.v s.len /\
B.witnessed s.base (wvalid p s compl pos gpos' gv)
)) ->
irepr p s compl
inline_for_extraction
let irepr_pos
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : Tot U32.t =
IRepr?.pos x
let irepr_pos'
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : Ghost U32.t
(requires True)
(ensures (fun y -> True))
= Ghost.reveal (IRepr?.gpos' x)
#push-options "--ifuel 1 --fuel 2"
let irepr_pos'_post
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : Lemma
(requires True)
(ensures (
let y = irepr_pos' x in
U32.v (irepr_pos x) <= U32.v y /\ U32.v y <= U32.v s.len
))
[SMTPat (irepr_pos' x)]
= ()
let irepr_v
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : GTot t
= Ghost.reveal (IRepr?.gv x)
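(* witness_valid_gen turns a currently valid position into an irepr: it
   requires validity of p at pos, stability of the corresponding wvalid
   predicate under the buffer's preservation relation, and the completion
   predicate to hold on the current contents; it leaves the memory
   unchanged and returns a token recording the position together with the
   ghost end position and ghost parsed value. *)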
inline_for_extraction
let witness_valid_gen
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
: HST.Stack (irepr p s compl)
(requires (fun h ->
valid p h s pos /\
B.stable_on (wvalid p s compl pos (Ghost.hide (get_valid_pos p h s pos)) (Ghost.hide (contents p h s pos))) (buffer_srel_of_srel rel) /\
compl pos (contents p h s pos) (get_valid_pos p h s pos) (B.as_seq h s.base)
))
(ensures (fun h res h' ->
h' == h /\
irepr_pos res == pos /\
valid_content_pos p h s pos (irepr_v res) (irepr_pos' res) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val witness_valid_gen
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
: HST.Stack (irepr p s compl)
(requires
(fun h ->
valid p h s pos /\
B.stable_on (wvalid p
s
compl
pos
(Ghost.hide (get_valid_pos p h s pos))
(Ghost.hide (contents p h s pos)))
(buffer_srel_of_srel rel) /\
compl pos (contents p h s pos) (get_valid_pos p h s pos) (B.as_seq h s.base)))
(ensures
(fun h res h' ->
h' == h /\ irepr_pos res == pos /\
valid_content_pos p h s pos (irepr_v res) (irepr_pos' res))) | [] | LowParse.Low.Base.witness_valid_gen | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | s: LowParse.Slice.slice rrel rel -> compl: LowParse.Low.Base.compl_t t -> pos: FStar.UInt32.t
-> FStar.HyperStack.ST.Stack (LowParse.Low.Base.irepr p s compl) | {
"end_col": 23,
"end_line": 2114,
"start_col": 1,
"start_line": 2106
} |
FStar.HyperStack.ST.Stack | val blit_strong
(#a: Type)
(#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src: U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst len: U32.t)
: HST.Stack unit
(requires
(fun h ->
B.live h src /\ B.live h dst /\ U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len))
(B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst)
(U32.v idx_dst)
(U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures
(fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len))) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h' | val blit_strong
(#a: Type)
(#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src: U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst len: U32.t)
: HST.Stack unit
(requires
(fun h ->
B.live h src /\ B.live h dst /\ U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len))
(B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst)
(U32.v idx_dst)
(U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures
(fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))
let blit_strong
(#a: Type)
(#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src: U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst len: U32.t)
: HST.Stack unit
(requires
(fun h ->
B.live h src /\ B.live h dst /\ U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len))
(B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst)
(U32.v idx_dst)
(U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures
(fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len))) = | true | null | false | let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h' | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.modifies_loc_buffer_from_to_intro",
"FStar.UInt32.add",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowStar.Monotonic.Buffer.blit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.length",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowStar.Monotonic.Buffer.loc_buffer_from_to",
"LowStar.Monotonic.Buffer.as_seq",
"FStar.Seq.Properties.replace_subseq",
"FStar.Seq.Base.slice",
"LowStar.Monotonic.Buffer.modifies",
"Prims.eq2",
"FStar.Seq.Base.seq"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
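(* A validator returns a 64-bit position: on success it is the end
   position of the value parsed at pos (so that pos and the result,
   truncated to 32 bits, form a valid parse interval), and on failure it
   is an error code for which is_success is false, meaning the input is
   not valid at pos. *)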
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
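(* writable b pos pos' h states that the buffer's preservation relation
   rel places no constraint on the range [pos, pos'): replacing that
   subsequence with any two candidate contents yields rel-related
   sequences, so the range may be freely overwritten. *)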
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
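(* Leaf writers come in two flavors: leaf_writer_weak performs its own
   bounds check against sl.len (which must be strictly below max_uint32)
   and returns max_uint32 when the value does not fit, whereas
   leaf_writer_strong requires the caller to prove in advance that the
   serialized bytes fit and that the target range is writable. *)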
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
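(* A serializer32 writes serialize s x into b at pos, assuming the caller
   has established liveness, room and writability for the serialized
   bytes; it returns the number of bytes written and modifies only the
   range [pos, pos + len). *)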
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val blit_strong
(#a: Type)
(#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src: U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst len: U32.t)
: HST.Stack unit
(requires
(fun h ->
B.live h src /\ B.live h dst /\ U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len))
(B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst)
(U32.v idx_dst)
(U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures
(fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len))) | [] | LowParse.Low.Base.blit_strong | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
src: LowStar.Monotonic.Buffer.mbuffer a rrel1 rel1 ->
idx_src: FStar.UInt32.t ->
dst: LowStar.Monotonic.Buffer.mbuffer a rrel2 rel2 ->
idx_dst: FStar.UInt32.t ->
len: FStar.UInt32.t
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 89,
"end_line": 1009,
"start_col": 1,
"start_line": 1006
} |
FStar.Pervasives.Lemma | val writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(h: HS.mem)
(lpos lpos': nat)
: Lemma
(requires
(writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\
pos' <= B.length b)) (ensures (writable b lpos lpos' h)) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
) | val writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(h: HS.mem)
(lpos lpos': nat)
: Lemma
(requires
(writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\
pos' <= B.length b)) (ensures (writable b lpos lpos' h))
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(h: HS.mem)
(lpos lpos': nat)
: Lemma
(requires
(writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\
pos' <= B.length b)) (ensures (writable b lpos lpos' h)) = | false | null | true | writable_intro b
lpos
lpos'
h
()
(fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 =
Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1)
in
let j2 =
Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2)
in
assert ((Seq.replace_subseq s lpos lpos' s1) `Seq.equal` j1);
assert ((Seq.replace_subseq s lpos lpos' s2) `Seq.equal` j2);
assert (j1 `rel` j2)) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.nat",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Low.Base.writable_intro",
"FStar.Seq.Properties.lseq",
"Prims.op_Subtraction",
"Prims._assert",
"Prims.unit",
"FStar.Seq.Base.equal",
"FStar.Seq.Properties.replace_subseq",
"FStar.Seq.Base.seq",
"FStar.Seq.Base.slice",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.l_and",
"LowParse.Low.Base.writable",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"LowStar.Monotonic.Buffer.length",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
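(* For a total, constant-size parser, validity at [pos] is equivalent to having
   at least [k.parser_kind_low] bytes left in the slice, so validation reduces
   to a single bounds check. *)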
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
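(* Weakening the parser kind changes neither validity nor contents. *)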
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
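(* A jumper computes the end position of a value already known to be valid at
   [pos]; unlike a validator, it performs no error checking. *)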
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
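(* [slong `seq_starts_with` sshort] holds when [sshort] is a prefix of [slong]. *)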
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
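(* Jump over a value whose serialization is (ghostly) known to be a prefix of
   the remaining bytes: validity is first derived from the serializer, then the
   jumper is called. *)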
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
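(* A leaf reader returns the value parsed at a valid position, without
   modifying memory. *)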
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
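(* [writable b pos pos' h] states that the preorder of [b] allows replacing the
   range [pos, pos') with arbitrary contents, i.e. this range may be freely
   overwritten. *)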
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b)) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos': nat)
(h: HS.mem)
(lpos lpos': nat)
: Lemma
(requires
(writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\
pos' <= B.length b)) (ensures (writable b lpos lpos' h)) | [] | LowParse.Low.Base.writable_weaken | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
b: LowStar.Monotonic.Buffer.mbuffer t rrel rel ->
pos: Prims.nat ->
pos': Prims.nat ->
h: FStar.Monotonic.HyperStack.mem ->
lpos: Prims.nat ->
lpos': Prims.nat
-> FStar.Pervasives.Lemma
(requires
LowParse.Low.Base.writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\
pos' <= LowStar.Monotonic.Buffer.length b)
(ensures LowParse.Low.Base.writable b lpos lpos' h) | {
"end_col": 3,
"end_line": 598,
"start_col": 2,
"start_line": 590
} |
FStar.HyperStack.ST.Stack | val list_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 {k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0})
(f: (t1 -> Tot t2))
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1': U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2:
U32.t
{ valid_list p1 h0 sl1 pos1 pos1' /\ U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\ U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32 })
(f':
(pos1_: U32.t -> pos2_: U32.t
-> HST.Stack U32.t
(requires
(fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\ valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\ U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h))
(ensures
(fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\
(let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_length s2 y > U32.v sl2.len
else valid_content_pos p2 h' sl2 pos2_ y res)))))
: HST.Stack U32.t
(requires
(fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\ live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h))
(ensures
(fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\
(let y = List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1') in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else valid_list p2 h' sl2 pos2 res /\ contents_list p2 h' sl2 pos2 res == y))) | [
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "BF"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let list_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot t2)) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_length s2 y > U32.v sl2.len
else
valid_content_pos p2 h' sl2 pos2_ y res
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1') in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= list_map_list_flatten_map f (contents_list p1 h0 sl1 pos1 pos1');
list_flatten_map
j1
s2
(fun x -> [f x])
h0
sl1 pos1 pos1'
sl2 pos2
(fun pos1 pos2 ->
let res = f' pos1 pos2 in
let h = HST.get () in
if res = max_uint32
then begin
serialized_list_length_nil s2;
serialized_list_length_cons s2 (f (contents p1 h0 sl1 pos1)) []
end
else begin
valid_list_nil p2 h sl2 res;
valid_list_cons p2 h sl2 pos2 res
end;
res
) | val list_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 {k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0})
(f: (t1 -> Tot t2))
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1': U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2:
U32.t
{ valid_list p1 h0 sl1 pos1 pos1' /\ U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\ U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32 })
(f':
(pos1_: U32.t -> pos2_: U32.t
-> HST.Stack U32.t
(requires
(fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\ valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\ U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h))
(ensures
(fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\
(let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_length s2 y > U32.v sl2.len
else valid_content_pos p2 h' sl2 pos2_ y res)))))
: HST.Stack U32.t
(requires
(fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\ live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h))
(ensures
(fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\
(let y = List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1') in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else valid_list p2 h' sl2 pos2 res /\ contents_list p2 h' sl2 pos2 res == y)))
let list_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 {k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0})
(f: (t1 -> Tot t2))
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1': U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2:
U32.t
{ valid_list p1 h0 sl1 pos1 pos1' /\ U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\ U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32 })
(f':
(pos1_: U32.t -> pos2_: U32.t
-> HST.Stack U32.t
(requires
(fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\ valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\ U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h))
(ensures
(fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\
(let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_length s2 y > U32.v sl2.len
else valid_content_pos p2 h' sl2 pos2_ y res)))))
: HST.Stack U32.t
(requires
(fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\ live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h))
(ensures
(fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\
(let y = List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1') in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else valid_list p2 h' sl2 pos2 res /\ contents_list p2 h' sl2 pos2 res == y))) = | true | null | false | list_map_list_flatten_map f (contents_list p1 h0 sl1 pos1 pos1');
list_flatten_map j1 s2 (fun x -> [f x]) h0 sl1 pos1 pos1' sl2 pos2
(fun pos1 pos2 ->
let res = f' pos1 pos2 in
let h = HST.get () in
if res = max_uint32
then
(serialized_list_length_nil s2;
serialized_list_length_cons s2 (f (contents p1 h0 sl1 pos1)) [])
else
(valid_list_nil p2 h sl2 res;
valid_list_cons p2 h sl2 pos2 res);
res) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Low.Base.jumper",
"LowParse.Spec.Base.serializer",
"Prims.l_and",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"Prims.b2t",
"Prims.op_GreaterThan",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt32.t",
"LowParse.Low.Base.Spec.valid_list",
"Prims.op_LessThanOrEqual",
"FStar.UInt32.v",
"LowParse.Slice.__proj__Mkslice__item__len",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowParse.Slice.loc_slice_from_to",
"LowParse.Slice.loc_slice_from",
"Prims.op_LessThan",
"LowParse.Low.ErrorCode.max_uint32",
"LowStar.Monotonic.Buffer.modifies",
"LowParse.Low.Base.Spec.valid",
"Prims.op_Addition",
"LowParse.Low.Base.Spec.content_length",
"LowParse.Slice.live_slice",
"LowParse.Low.Base.writable",
"LowParse.Slice.buffer_srel_of_srel",
"LowParse.Slice.__proj__Mkslice__item__base",
"Prims.op_Equality",
"LowParse.Low.Base.Spec.serialized_length",
"Prims.bool",
"LowParse.Low.Base.Spec.valid_content_pos",
"Prims.logical",
"LowParse.Low.Base.Spec.contents",
"LowParse.Low.Base.list_flatten_map",
"Prims.Cons",
"Prims.Nil",
"Prims.list",
"Prims.unit",
"LowParse.Low.Base.Spec.serialized_list_length_cons",
"LowParse.Low.Base.Spec.serialized_list_length_nil",
"LowParse.Low.Base.Spec.valid_list_cons",
"LowParse.Low.Base.Spec.valid_list_nil",
"FStar.HyperStack.ST.get",
"LowParse.Low.Base.Spec.list_map_list_flatten_map",
"LowParse.Low.Base.Spec.contents_list",
"LowParse.Low.Base.Spec.serialized_list_length",
"FStar.List.Tot.Base.map"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
  (sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong)
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
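(* Writability of a range implies writability of any of its sub-ranges. *)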
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
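(* Single-cell update inside a writable range: the write is compatible with the
   buffer preorder and preserves writability of the whole range. *)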
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
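(* A weak leaf writer checks for available space at run time and returns
   [max_uint32] on overflow; the strong variant below assumes the caller has
   already established that the value fits. *)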
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
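(* A low-level serializer writes the representation of [x] into a plain buffer
   at [pos] and returns the number of bytes written. *)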
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h'
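(* Copy the valid representation at [spos, spos') in [src] byte-for-byte into
   [dst] at [dpos]; the strong-prefix property of [p] guarantees that the copy
   is valid in [dst] with the same contents. *)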
#push-options "--z3rlimit 16"
inline_for_extraction
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos
))
= let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h0 (Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@inline_let] let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos'
#pop-options
inline_for_extraction
let copy_strong'
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\ (
let clen = content_length p h src spos in
U32.v dpos + clen <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + clen) h /\
B.loc_disjoint (loc_slice_from src spos) (loc_slice_from_to dst dpos (dpos `U32.add` (U32.uint_to_t clen)))
)))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
))
= let spos' = j src spos in
copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= if (dst.len `U32.sub` dpos) `U32.lt` (spos' `U32.sub` spos)
then max_uint32
else copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(jmp: jumper p)
(src: slice rrel1 rel1)
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (content_length p h src spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos (get_valid_pos p h src spos)) (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= let spos' = jmp src spos in
copy_weak_with_length p src spos spos' dst dpos
(* fold_left on lists *)
module BF = LowStar.Buffer
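(* Generic fold over a list of consecutive valid representations between [pos]
   and [pos']; the body may interrupt the iteration by returning [false], in
   which case [post_interrupt] holds instead of the loop invariant. *)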
#push-options "--z3rlimit 256 --fuel 1 --ifuel 1"
#restart-solver
inline_for_extraction
let list_fold_left_gen
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(post_interrupt: ((h: HS.mem) -> GTot Type0))
(post_interrupt_frame: (h: HS.mem) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
post_interrupt h
)) (ensures (post_interrupt h')))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
HST.Stack bool
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos1 /\
valid_pos p h0 sl pos1 pos2 /\
valid_list p h0 sl pos2 pos' /\
inv h (contents_list p h0 sl pos pos1) (contents p h0 sl pos1 :: contents_list p h0 sl pos2 pos') pos1
))
(ensures (fun h ctinue h' ->
B.modifies (Ghost.reveal l) h h' /\
(if ctinue then inv h' (contents_list p h0 sl pos pos1 `L.append` [contents p h0 sl pos1]) (contents_list p h0 sl pos2 pos') pos2 else post_interrupt h')
))
))
: HST.Stack bool
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h res h' ->
B.modifies (Ghost.reveal l) h h' /\
(if res then inv h' (contents_list p h sl pos pos') [] pos' else post_interrupt h')
))
= HST.push_frame ();
let h1 = HST.get () in
// B.fresh_frame_modifies h0 h1;
let bpos : BF.pointer U32.t = BF.alloca pos 1ul in
let bctinue : BF.pointer bool = BF.alloca true 1ul in
let btest: BF.pointer bool = BF.alloca (pos `U32.lt` pos') 1ul in
let h2 = HST.get () in
assert (B.modifies B.loc_none h0 h2);
let test_pre (h: HS.mem) : GTot Type0 =
B.live h bpos /\ B.live h bctinue /\ B.live h btest /\ (
let pos1 = Seq.index (B.as_seq h bpos) 0 in
let ctinue = Seq.index (B.as_seq h bctinue) 0 in
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
B.modifies (Ghost.reveal l `B.loc_union` B.loc_region_only true (HS.get_tip h1)) h2 h /\
Seq.index (B.as_seq h btest) 0 == ((U32.v (Seq.index (B.as_seq h bpos) 0) < U32.v pos') && Seq.index (B.as_seq h bctinue) 0) /\
(if ctinue then inv h (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 else post_interrupt h)
)
in
let test_post (cond: bool) (h: HS.mem) : GTot Type0 =
test_pre h /\
cond == Seq.index (B.as_seq h btest) 0
in
valid_list_nil p h0 sl pos;
inv_frame h0 [] (contents_list p h0 sl pos pos') pos h1;
inv_frame h1 [] (contents_list p h0 sl pos pos') pos h2;
[@inline_let]
let while_body () : HST.Stack unit
(requires (fun h -> test_post true h))
(ensures (fun _ _ h1 -> test_pre h1))
=
let h51 = HST.get () in
let pos1 = B.index bpos 0ul in
valid_list_cons_recip p h0 sl pos1 pos';
//assert (B.modifies (Ghost.reveal l `B.loc_union` B.loc_buffer bpos) h0 h51);
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos'));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos pos1));
valid_list_cons_recip p h51 sl pos1 pos';
let pos2 = j sl pos1 in
let h52 = HST.get () in
inv_frame h51 (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 h52;
B.modifies_only_not_unused_in (Ghost.reveal l) h0 h52;
let ctinue = body pos1 pos2 in
let h53 = HST.get () in
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos2 pos'));
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h1));
valid_pos_frame_strong p h0 sl pos1 pos2 (Ghost.reveal l) h53;
valid_list_snoc p h0 sl pos pos1;
B.upd bpos 0ul pos2;
B.upd bctinue 0ul ctinue;
B.upd btest 0ul ((pos2 `U32.lt` pos') && ctinue);
let h54 = HST.get () in
[@inline_let]
let _ =
if ctinue
then inv_frame h53 (contents_list p h0 sl pos pos2) (contents_list p h0 sl pos2 pos') pos2 h54
else post_interrupt_frame h53 h54
in
()
in
C.Loops.while
#test_pre
#test_post
(fun (_: unit) -> (
B.index btest 0ul) <: HST.Stack bool (requires (fun h -> test_pre h)) (ensures (fun h x h1 -> test_post x h1)))
while_body
;
valid_list_nil p h0 sl pos';
let res = B.index bctinue 0ul in
let h3 = HST.get () in
HST.pop_frame ();
let h4 = HST.get () in
//B.popped_modifies h3 h4;
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h3));
[@inline_let]
let _ =
if res
then inv_frame h3 (contents_list p h0 sl pos pos') [] pos' h4
else post_interrupt_frame h3 h4
in
res
#pop-options
//B.loc_includes_union_l (B.loc_all_regions_from false (HS.get_tip h1)) (Ghost.reveal l) (Ghost.reveal l)
//B.modifies_fresh_frame_popped h0 h1 (Ghost.reveal l) h3 h4
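(* [list_fold_left_gen] above folds over the consecutive valid positions of a
   serialized list between [pos] and [pos'], keeping the current position in a
   stack-allocated cursor and threading the caller-supplied invariant [inv].
   The [body] may interrupt the traversal by returning [false], in which case
   [post_interrupt] holds at the end; if it always returns [true], the fold
   reaches [pos'] with [inv] holding for the whole list. *)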
module G = FStar.Ghost
inline_for_extraction
let list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
(l1: Ghost.erased (list t)) ->
(x: Ghost.erased t) ->
(l2: Ghost.erased (list t)) ->
HST.Stack unit
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\
U32.v pos <= U32.v pos1 /\ U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' == Ghost.reveal l1 `L.append` (Ghost.reveal x :: Ghost.reveal l2)
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (Ghost.reveal l1 `L.append` [contents p h0 sl pos1]) (Ghost.reveal l2) pos2
))
))
: HST.Stack unit
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (contents_list p h sl pos pos') [] pos'
))
= let _ = list_fold_left_gen
p
j
sl
pos pos'
h0
l
inv
inv_frame
(fun _ -> False)
(fun _ _ -> ())
(fun pos1 pos2 ->
let h = HST.get () in
valid_list_cons p h sl pos1 pos';
valid_list_append p h sl pos pos1 pos';
body
pos1
pos2
(Ghost.hide (contents_list p h sl pos pos1))
(Ghost.hide (contents p h sl pos1))
(Ghost.hide (contents_list p h sl pos2 pos'))
;
true
)
in
()
inline_for_extraction
let list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v res == L.length (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
B.fresh_frame_modifies h0 h1;
let blen : BF.pointer U32.t = BF.alloca 0ul 1ul in
let h2 = HST.get () in
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer blen))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_buffer blen) h2 h /\
B.live h blen /\ (
let len = U32.v (Seq.index (B.as_seq h blen) 0) in
len <= U32.v pos1 /\ // necessary to prove that length computations do not overflow
len == L.length l1
))
(fun h l1 l2 pos1 h' ->
B.modifies_only_not_unused_in (B.loc_buffer blen) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
B.upd blen 0ul (B.index blen 0ul `U32.add` 1ul);
Classical.forall_intro_2 (list_length_append #t)
)
;
let len = B.index blen 0ul in
HST.pop_frame ();
len
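(* Illustrative sketch (hypothetical helper, not part of the original
   development): when the element parser has a known constant size, the
   [jumper] argument of [list_length] can be instantiated with
   [jump_constant_size], defined earlier in this file. The wrapper below only
   restates the specification of [list_length]. *)
inline_for_extraction
noextract
let list_length_constant_size_sketch
  (#rrel #rel: _)
  (#k: parser_kind)
  (#t: Type)
  (p: parser k t)
  (sz: U32.t)
  (u: unit {
    k.parser_kind_high == Some k.parser_kind_low /\
    k.parser_kind_low == U32.v sz
  })
  (sl: slice rrel rel)
  (pos pos' : U32.t)
: HST.Stack U32.t
  (requires (fun h ->
    valid_list p h sl pos pos'
  ))
  (ensures (fun h res h' ->
    B.modifies B.loc_none h h' /\
    U32.v res == L.length (contents_list p h sl pos pos')
  ))
= // delegate to list_length, using the constant-size jumper
  list_length p (jump_constant_size p sz u) sl pos pos'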
#push-options "--z3rlimit 32 --fuel 2 --ifuel 1"
inline_for_extraction
let list_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(f: (t -> Tot bool))
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
(x: Ghost.erased t) ->
HST.Stack bool
(requires (fun h -> valid_content p h sl pos (G.reveal x)))
(ensures (fun h res h' -> B.modifies B.loc_none h h' /\ res == f (G.reveal x)))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
(#rrel_out #rel_out: _)
(sl_out : slice rrel_out rel_out)
(pos_out : U32.t)
: HST.Stack U32.t
(requires (fun h ->
U32.v pos_out + U32.v pos' - U32.v pos <= U32.v sl_out.len /\
valid_list p h sl pos pos' /\
B.loc_disjoint (loc_slice_from_to sl pos pos') (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
live_slice h sl_out
))
(ensures (fun h pos_out' h' ->
B.modifies (loc_slice_from_to sl_out pos_out pos_out') h h' /\
U32.v pos_out' - U32.v pos_out <= U32.v pos' - U32.v pos /\
valid_list p h' sl_out pos_out pos_out' /\
contents_list p h' sl_out pos_out pos_out' == L.filter f (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
//B.fresh_frame_modifies h0 h1;
let bpos_out' : BF.pointer U32.t = BF.alloca pos_out 1ul in
let h2 = HST.get () in
let inv (h: HS.mem) (l1 l2: list t) (pos1: U32.t) : GTot Type0 =
B.live h bpos_out' /\ (
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
valid_list p h sl_out pos_out pos_out' /\
contents_list p h sl_out pos_out pos_out' == L.filter f l1 /\
U32.v pos_out' - U32.v pos1 <= U32.v pos_out - U32.v pos // necessary to prove that length computations do not overflow
)
in
valid_list_nil p h2 sl_out pos_out;
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))))
inv
(fun h l1 l2 pos1 h' ->
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies_only_not_unused_in (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
let pos_out1 = B.index bpos_out' 0ul in
list_filter_append f (G.reveal l1) [G.reveal x];
if f' sl pos1 x
then begin
assert (B.loc_includes (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) (loc_slice_from_to sl_out pos_out1 (pos_out1 `U32.add` (pos2 `U32.sub` pos1))));
let h = HST.get () in
writable_weaken sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (U32.v pos_out1) (U32.v pos_out1 + (U32.v pos2 - U32.v pos1));
let pos_out2 = copy_strong p sl pos1 pos2 sl_out pos_out1 in
B.upd bpos_out' 0ul pos_out2;
let h' = HST.get () in
writable_modifies sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (B.loc_region_only true (HS.get_tip h1)) h';
valid_list_nil p h' sl_out pos_out2;
valid_list_cons p h' sl_out pos_out1 pos_out2;
valid_list_append p h' sl_out pos_out pos_out1 pos_out2
end else
L.append_l_nil (L.filter f (G.reveal l1))
)
;
let pos_out' = B.index bpos_out' 0ul in
HST.pop_frame ();
pos_out'
#pop-options
#push-options "--z3rlimit 64 --fuel 2 --ifuel 1"
inline_for_extraction
let list_nth
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(i: U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos' /\
U32.v i < L.length (contents_list p h sl pos pos')
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
valid_list p h sl pos res /\
valid p h sl res /\
valid_list p h sl (get_valid_pos p h sl res) pos' /\
L.length (contents_list p h sl pos res) == U32.v i /\
contents p h sl res == L.index (contents_list p h sl pos pos') (U32.v i)
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos1 = BF.alloca pos 1ul in
let bk = BF.alloca 0ul 1ul in
let h2 = HST.get () in
valid_list_nil p h0 sl pos;
let _ : bool = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
let k = Seq.index (B.as_seq h bk) 0 in
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bpos1 /\
B.live h bk /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v k /\
U32.v k <= U32.v i
)
(fun h _ _ _ h' ->
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bpos1);
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bk);
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
let pos1 = Seq.index (B.as_seq h bpos1) 0 in
B.live h bpos1 /\
valid p h0 sl pos1 /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl (get_valid_pos p h0 sl pos1) pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v i /\
contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i)
)
(fun _ _ ->
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 ->
let k = B.index bk 0ul in
if k = i
then begin
B.upd bpos1 0ul pos1;
valid_list_cons_recip p h0 sl pos1 pos';
list_index_append (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') (U32.v i);
valid_list_append p h0 sl pos pos1 pos' ;
assert (contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i));
false
end else begin
B.upd bk 0ul (k `U32.add` 1ul);
let h = HST.get () in
B.modifies_only_not_unused_in B.loc_none h0 h;
valid_list_snoc p h0 sl pos pos1;
assert (valid p h0 sl pos1);
assert (pos2 == get_valid_pos p h0 sl pos1);
assert (valid_list p h0 sl pos pos2);
list_length_append (contents_list p h0 sl pos pos1) [contents p h0 sl pos1];
true
end
)
in
let res = B.index bpos1 0ul in
HST.pop_frame ();
res
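(* [list_nth] above locates the [i]-th element in a single pass: the counter
   [bk] records how many elements have been skipped, and once it reaches [i]
   the current position is stored in [bpos1] and the traversal is interrupted
   by returning [false] to [list_fold_left_gen], so the remaining elements are
   not visited. *)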
inline_for_extraction
let list_find
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let l = contents_list p h sl pos pos' in
if res = pos'
then L.find f l == None
else
U32.v pos <= U32.v res /\
valid p h sl res /\ (
let x = contents p h sl res in
U32.v res + content_length p h sl res <= U32.v pos' /\
f x == true /\
L.find f l == Some x
)
)))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bres = BF.alloca 0ul 1ul in
let h2 = HST.get () in
let not_found = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\
valid_list p h0 sl pos1 pos' /\
l2 == contents_list p h0 sl pos1 pos' /\
L.find f (contents_list p h0 sl pos pos') == L.find f l2
)
(fun h _ _ _ h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\ (
let res = Seq.index (B.as_seq h bres) 0 in
U32.v pos <= U32.v res /\
valid p h0 sl res /\ (
let x = contents p h0 sl res in
U32.v res + content_length p h0 sl res <= U32.v pos' /\
f x == true /\
L.find f (contents_list p h0 sl pos pos') == Some x
)))
(fun h h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun pos1 pos2 ->
if f' sl pos1
then begin
B.upd bres 0ul pos1;
false
end
else true
)
in
let res =
if not_found
then pos'
else B.index bres 0ul
in
HST.pop_frame ();
res
#pop-options
let rec list_existsb_find
(#a: Type)
(f: (a -> Tot bool))
(l: list a)
: Lemma
(L.existsb f l == Some? (L.find f l))
= match l with
| [] -> ()
| x :: q ->
if f x
then ()
else list_existsb_find f q
inline_for_extraction
noextract
let list_existsb
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack bool
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == L.existsb f (contents_list p h sl pos pos')
))
= let h = HST.get () in
list_existsb_find f (contents_list p h sl pos pos');
let posn = list_find j f f' sl pos pos' in
posn <> pos'
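(* [list_existsb] is reduced to [list_find]: the pure lemma
   [list_existsb_find] shows that [L.existsb f l] holds exactly when
   [L.find f l] is [Some _], so it suffices to test whether [list_find]
   returned a position different from [pos'], the "not found" result. *)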
#push-options "--fuel 2 --ifuel 1 --z3rlimit 256 --query_stats"
inline_for_extraction
noextract
let list_flatten_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot (list t2))) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2_ res /\
contents_list p2 h' sl2 pos2_ res == y
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1')) in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= let hz = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos2_ = BF.alloca pos2 1ul in
let h2 = HST.get () in
valid_list_nil p2 hz sl2 pos2;
let fits = list_fold_left_gen
p1
j1
sl1
pos1 pos1'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2))
(fun h ll lr _ ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
B.live h bpos2_ /\ (
let pos2_ = Seq.index (B.as_seq h bpos2_) 0 in
contents_list p1 h0 sl1 pos1 pos1' == ll `List.Tot.append` lr /\
valid_list p2 h sl2 pos2 pos2_ /\
contents_list p2 h sl2 pos2 pos2_ == List.Tot.flatten (List.Tot.map f ll) /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(fun h _ _ _ h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
U32.v pos2 + serialized_list_length s2 (List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1'))) > U32.v sl2.len
)
(fun h h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1l pos1r ->
let pos2_ = B.index bpos2_ 0ul in
let h = HST.get () in
writable_weaken sl2.base (U32.v pos2) (U32.v sl2.len) h (U32.v pos2_) (U32.v sl2.len);
valid_pos_frame_strong p1 h0 sl1 pos1l pos1r (loc_slice_from sl2 pos2) hz;
let res = f' pos1l pos2_ in
let fits = not (res = max_uint32) in
if fits then begin
B.upd bpos2_ 0ul res;
let h' = HST.get () in
writable_modifies sl2.base (U32.v pos2) (U32.v sl2.len) h (B.loc_region_only true (HS.get_tip h1)) h' ;
List.Tot.append_assoc (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l] (contents_list p1 h0 sl1 pos1r pos1');
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l];
valid_list_snoc p1 h0 sl1 pos1 pos1l;
valid_list_append p2 h' sl2 pos2 pos2_ res;
valid_list_nil p2 h' sl2 res;
valid_list_append p2 h' sl2 pos2_ res res
end else begin
let h' = HST.get () in
valid_list_cons p1 h0 sl1 pos1l pos1' ;
valid_list_append p1 h0 sl1 pos1 pos1l pos1' ;
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) (contents_list p1 h0 sl1 pos1l pos1');
serialized_list_length_append s2 (L.flatten (L.map f (contents_list p1 h0 sl1 pos1 pos1l))) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1l pos1')));
serialized_list_length_append s2 (f (contents p1 h0 sl1 pos1l)) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1r pos1')));
valid_list_serialized_list_length s2 h' sl2 pos2 pos2_
end;
fits
)
in
let res =
if fits
then B.index bpos2_ 0ul
else max_uint32
in
HST.pop_frame ();
res
#pop-options
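(* [list_flatten_map] above writes, into [sl2] starting at [pos2], the
   serialization of [List.Tot.flatten (List.Tot.map f l)] where [l] is the
   input list, calling [f'] once per input element. It returns [max_uint32]
   when the output slice is too small to hold the whole result. *)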
#push-options "--z3rlimit 16 --query_stats"
inline_for_extraction
noextract
let list_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot t2)) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_length s2 y > U32.v sl2.len
else
valid_content_pos p2 h' sl2 pos2_ y res
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1') in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val list_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 {k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0})
(f: (t1 -> Tot t2))
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1': U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2:
U32.t
{ valid_list p1 h0 sl1 pos1 pos1' /\ U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\ U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32 })
(f':
(pos1_: U32.t -> pos2_: U32.t
-> HST.Stack U32.t
(requires
(fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\ valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\ U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h))
(ensures
(fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\
(let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_length s2 y > U32.v sl2.len
else valid_content_pos p2 h' sl2 pos2_ y res)))))
: HST.Stack U32.t
(requires
(fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\ live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h))
(ensures
(fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\
(let y = List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1') in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else valid_list p2 h' sl2 pos2 res /\ contents_list p2 h' sl2 pos2 res == y))) | [] | LowParse.Low.Base.list_map | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
j1: LowParse.Low.Base.jumper p1 ->
s2:
LowParse.Spec.Base.serializer p2
{ Mkparser_kind'?.parser_kind_subkind k2 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong /\
Mkparser_kind'?.parser_kind_low k2 > 0 } ->
f: (_: t1 -> t2) ->
h0: FStar.Monotonic.HyperStack.mem ->
sl1: LowParse.Slice.slice rrel1 rel1 ->
pos1: FStar.UInt32.t ->
pos1': FStar.UInt32.t ->
sl2: LowParse.Slice.slice rrel2 rel2 ->
pos2:
FStar.UInt32.t
{ LowParse.Low.Base.Spec.valid_list p1 h0 sl1 pos1 pos1' /\
FStar.UInt32.v pos1 <= FStar.UInt32.v pos1' /\
FStar.UInt32.v pos1' <= FStar.UInt32.v (Mkslice?.len sl1) /\
FStar.UInt32.v pos2 <= FStar.UInt32.v (Mkslice?.len sl2) /\
LowStar.Monotonic.Buffer.loc_disjoint (LowParse.Slice.loc_slice_from_to sl1 pos1 pos1')
(LowParse.Slice.loc_slice_from sl2 pos2) /\
FStar.UInt32.v (Mkslice?.len sl2) < FStar.UInt32.v LowParse.Low.ErrorCode.max_uint32 } ->
f': (pos1_: FStar.UInt32.t -> pos2_: FStar.UInt32.t -> FStar.HyperStack.ST.Stack FStar.UInt32.t)
-> FStar.HyperStack.ST.Stack FStar.UInt32.t | {
"end_col": 5,
"end_line": 1969,
"start_col": 2,
"start_line": 1948
} |
Prims.Tot | val jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit{k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U32.v sz})
: Tot (jumper p) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u | val jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit{k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U32.v sz})
: Tot (jumper p)
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit{k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U32.v sz})
: Tot (jumper p) = | false | null | false | jump_constant_size' (fun _ -> p) sz u | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"FStar.UInt32.t",
"Prims.unit",
"Prims.l_and",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"Prims.nat",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low",
"Prims.int",
"Prims.l_or",
"Prims.b2t",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt.size",
"FStar.UInt32.n",
"FStar.UInt32.v",
"LowParse.Low.Base.jump_constant_size'",
"LowParse.Low.Base.jumper"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
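(* Because the parser kind is total and has a known constant size [sz], no
   bytes need to be read: the lemma [valid_total_constant_size] reduces
   validity at [pos] to having at least [sz] bytes left in the slice, which is
   exactly the length check performed above. *)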
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
}) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit{k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U32.v sz})
: Tot (jumper p) | [] | LowParse.Low.Base.jump_constant_size | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
p: LowParse.Spec.Base.parser k t ->
sz: FStar.UInt32.t ->
u131:
u132:
Prims.unit
{ Mkparser_kind'?.parser_kind_high k ==
FStar.Pervasives.Native.Some (Mkparser_kind'?.parser_kind_low k) /\
Mkparser_kind'?.parser_kind_low k == FStar.UInt32.v sz }
-> LowParse.Low.Base.jumper p | {
"end_col": 39,
"end_line": 419,
"start_col": 2,
"start_line": 419
} |
Prims.Tot | val accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq)) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos | val accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq)) = | false | null | false | fun #rrel #rel input pos ->
let h = HST.get () in
[@@ inline_let ]let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Low.Base.Spec.clens",
"LowParse.Low.Base.Spec.gaccessor",
"LowParse.Low.Base.accessor",
"Prims.squash",
"LowParse.Low.Base.Spec.clens_eq",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt32.t",
"Prims.unit",
"LowParse.Low.Base.Spec.gaccessor_ext_eq",
"LowParse.Slice.bytes_of_slice_from",
"LowParse.Low.Base.Spec.slice_access_eq",
"LowParse.Low.Base.Spec.gaccessor_ext",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl')) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq)) | [] | LowParse.Low.Base.accessor_ext | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
a: LowParse.Low.Base.accessor g ->
cl': LowParse.Low.Base.Spec.clens t1 t2 ->
sq: Prims.squash (LowParse.Low.Base.Spec.clens_eq cl cl')
-> LowParse.Low.Base.accessor (LowParse.Low.Base.Spec.gaccessor_ext g cl' sq) | {
"end_col": 13,
"end_line": 103,
"start_col": 2,
"start_line": 95
} |
FStar.Pervasives.Lemma | val valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires
(k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal))
(ensures
((valid p h input pos <==>
(live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low))) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos | val valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires
(k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal))
(ensures
((valid p h input pos <==>
(live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)))
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires
(k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal))
(ensures
((valid p h input pos <==>
(live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low))) = | false | null | true | parser_kind_prop_equiv k p;
valid_facts p h input pos | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"lemma"
] | [
"FStar.Monotonic.HyperStack.mem",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"Prims.nat",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt32.t",
"LowParse.Low.Base.Spec.valid_facts",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"Prims.l_and",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low",
"LowParse.Spec.Base.parser_kind_metadata_some",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_metadata",
"LowParse.Spec.Base.ParserKindMetadataTotal",
"Prims.squash",
"Prims.l_iff",
"LowParse.Low.Base.Spec.valid",
"LowParse.Slice.live_slice",
"Prims.b2t",
"Prims.op_GreaterThanOrEqual",
"Prims.op_Subtraction",
"FStar.UInt32.v",
"LowParse.Slice.__proj__Mkslice__item__len",
"Prims.l_imp",
"LowParse.Low.Base.Spec.content_length",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires
(k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal))
(ensures
((valid p h input pos <==>
(live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low))) | [] | LowParse.Low.Base.valid_total_constant_size | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
h: FStar.Monotonic.HyperStack.mem ->
p: LowParse.Spec.Base.parser k t ->
sz: Prims.nat ->
input: LowParse.Slice.slice rrel rel ->
pos: FStar.UInt32.t
-> FStar.Pervasives.Lemma
(requires
Mkparser_kind'?.parser_kind_high k ==
FStar.Pervasives.Native.Some (Mkparser_kind'?.parser_kind_low k) /\
Mkparser_kind'?.parser_kind_low k == sz /\
Mkparser_kind'?.parser_kind_metadata k ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserKindMetadataTotal)
(ensures
(LowParse.Low.Base.Spec.valid p h input pos <==>
LowParse.Slice.live_slice h input /\
FStar.UInt32.v (Mkslice?.len input) - FStar.UInt32.v pos >=
Mkparser_kind'?.parser_kind_low k) /\
(LowParse.Low.Base.Spec.valid p h input pos ==>
LowParse.Low.Base.Spec.content_length p h input pos == Mkparser_kind'?.parser_kind_low k)) | {
"end_col": 27,
"end_line": 285,
"start_col": 2,
"start_line": 284
} |
Prims.Tot | val validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c:
error_code
{ k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal })
: Tot (validator p) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz) | val validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c:
error_code
{ k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal })
: Tot (validator p)
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c:
error_code
{ k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal })
: Tot (validator p) = | false | null | false | fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@@ inline_let ]let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt ((Cast.uint32_to_uint64 input.len) `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else (pos `U64.add` sz) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"FStar.UInt64.t",
"LowParse.Low.ErrorCode.error_code",
"Prims.l_and",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"Prims.nat",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low",
"Prims.int",
"Prims.l_or",
"Prims.b2t",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt.size",
"FStar.UInt64.n",
"FStar.UInt64.v",
"LowParse.Spec.Base.parser_kind_metadata_some",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_metadata",
"LowParse.Spec.Base.ParserKindMetadataTotal",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt64.lt",
"FStar.UInt64.sub",
"FStar.Int.Cast.uint32_to_uint64",
"LowParse.Slice.__proj__Mkslice__item__len",
"LowParse.Low.ErrorCode.set_validator_error_pos_and_code",
"LowParse.Low.ErrorCode.validator_error_not_enough_data",
"Prims.bool",
"FStar.UInt64.add",
"Prims.unit",
"LowParse.Low.Base.valid_total_constant_size",
"LowParse.Low.ErrorCode.uint64_to_uint32",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowParse.Low.Base.validator"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
}) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c:
error_code
{ k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal })
: Tot (validator p) | [] | LowParse.Low.Base.validate_total_constant_size_with_error_code | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
p: LowParse.Spec.Base.parser k t ->
sz: FStar.UInt64.t ->
c:
LowParse.Low.ErrorCode.error_code
{ Mkparser_kind'?.parser_kind_high k ==
FStar.Pervasives.Native.Some (Mkparser_kind'?.parser_kind_low k) /\
Mkparser_kind'?.parser_kind_low k == FStar.UInt64.v sz /\
Mkparser_kind'?.parser_kind_metadata k ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserKindMetadataTotal }
-> LowParse.Low.Base.validator p | {
"end_col": 22,
"end_line": 339,
"start_col": 2,
"start_line": 333
} |
FStar.HyperStack.ST.Stack | val list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos': U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc {B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos')})
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame:
(h: HS.mem -> l1: list t -> l2: list t -> pos1: U32.t -> h': HS.mem
-> Lemma (requires (B.modifies (B.loc_unused_in h0) h h' /\ inv h l1 l2 pos1))
(ensures (inv h' l1 l2 pos1))))
(body:
(
pos1: U32.t ->
pos2: U32.t ->
l1: Ghost.erased (list t) ->
x: Ghost.erased t ->
l2: Ghost.erased (list t)
-> HST.Stack unit
(requires
(fun h ->
B.modifies (Ghost.reveal l) h0 h /\ valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\ U32.v pos <= U32.v pos1 /\
U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos')
(loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' ==
(Ghost.reveal l1)
`L.append`
(Ghost.reveal x :: Ghost.reveal l2)))
(ensures
(fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h'
((Ghost.reveal l1) `L.append` [contents p h0 sl pos1])
(Ghost.reveal l2)
pos2))))
: HST.Stack unit
(requires
(fun h ->
h == h0 /\ valid_list p h sl pos pos' /\ inv h [] (contents_list p h sl pos pos') pos))
(ensures
(fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\ inv h' (contents_list p h sl pos pos') [] pos')) | [
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "BF"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
(l1: Ghost.erased (list t)) ->
(x: Ghost.erased t) ->
(l2: Ghost.erased (list t)) ->
HST.Stack unit
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\
U32.v pos <= U32.v pos1 /\ U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' == Ghost.reveal l1 `L.append` (Ghost.reveal x :: Ghost.reveal l2)
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (Ghost.reveal l1 `L.append` [contents p h0 sl pos1]) (Ghost.reveal l2) pos2
))
))
: HST.Stack unit
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (contents_list p h sl pos pos') [] pos'
))
= let _ = list_fold_left_gen
p
j
sl
pos pos'
h0
l
inv
inv_frame
(fun _ -> False)
(fun _ _ -> ())
(fun pos1 pos2 ->
let h = HST.get () in
valid_list_cons p h sl pos1 pos';
valid_list_append p h sl pos pos1 pos';
body
pos1
pos2
(Ghost.hide (contents_list p h sl pos pos1))
(Ghost.hide (contents p h sl pos1))
(Ghost.hide (contents_list p h sl pos2 pos'))
;
true
)
in
() | val list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos': U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc {B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos')})
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame:
(h: HS.mem -> l1: list t -> l2: list t -> pos1: U32.t -> h': HS.mem
-> Lemma (requires (B.modifies (B.loc_unused_in h0) h h' /\ inv h l1 l2 pos1))
(ensures (inv h' l1 l2 pos1))))
(body:
(
pos1: U32.t ->
pos2: U32.t ->
l1: Ghost.erased (list t) ->
x: Ghost.erased t ->
l2: Ghost.erased (list t)
-> HST.Stack unit
(requires
(fun h ->
B.modifies (Ghost.reveal l) h0 h /\ valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\ U32.v pos <= U32.v pos1 /\
U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos')
(loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' ==
(Ghost.reveal l1)
`L.append`
(Ghost.reveal x :: Ghost.reveal l2)))
(ensures
(fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h'
((Ghost.reveal l1) `L.append` [contents p h0 sl pos1])
(Ghost.reveal l2)
pos2))))
: HST.Stack unit
(requires
(fun h ->
h == h0 /\ valid_list p h sl pos pos' /\ inv h [] (contents_list p h sl pos pos') pos))
(ensures
(fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\ inv h' (contents_list p h sl pos pos') [] pos'))
let list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos': U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc {B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos')})
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame:
(h: HS.mem -> l1: list t -> l2: list t -> pos1: U32.t -> h': HS.mem
-> Lemma (requires (B.modifies (B.loc_unused_in h0) h h' /\ inv h l1 l2 pos1))
(ensures (inv h' l1 l2 pos1))))
(body:
(
pos1: U32.t ->
pos2: U32.t ->
l1: Ghost.erased (list t) ->
x: Ghost.erased t ->
l2: Ghost.erased (list t)
-> HST.Stack unit
(requires
(fun h ->
B.modifies (Ghost.reveal l) h0 h /\ valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\ U32.v pos <= U32.v pos1 /\
U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos')
(loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' ==
(Ghost.reveal l1)
`L.append`
(Ghost.reveal x :: Ghost.reveal l2)))
(ensures
(fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h'
((Ghost.reveal l1) `L.append` [contents p h0 sl pos1])
(Ghost.reveal l2)
pos2))))
: HST.Stack unit
(requires
(fun h ->
h == h0 /\ valid_list p h sl pos pos' /\ inv h [] (contents_list p h sl pos pos') pos))
(ensures
(fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\ inv h' (contents_list p h sl pos pos') [] pos')) = | true | null | false | let _ =
list_fold_left_gen p j sl pos pos' h0 l inv inv_frame (fun _ -> False) (fun _ _ -> ())
(fun pos1 pos2 ->
let h = HST.get () in
valid_list_cons p h sl pos1 pos';
valid_list_append p h sl pos pos1 pos';
body pos1
pos2
(Ghost.hide (contents_list p h sl pos pos1))
(Ghost.hide (contents p h sl pos1))
(Ghost.hide (contents_list p h sl pos2 pos'));
true)
in
() | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [] | [
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Low.Base.jumper",
"LowParse.Slice.slice",
"FStar.UInt32.t",
"FStar.Monotonic.HyperStack.mem",
"FStar.Ghost.erased",
"LowStar.Monotonic.Buffer.loc",
"LowStar.Monotonic.Buffer.loc_disjoint",
"FStar.Ghost.reveal",
"LowParse.Slice.loc_slice_from_to",
"Prims.list",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_unused_in",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowParse.Low.Base.Spec.valid_list",
"LowParse.Low.Base.Spec.valid_content_pos",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.loc_includes",
"Prims.Cons",
"Prims.eq2",
"LowParse.Low.Base.Spec.contents_list",
"FStar.List.Tot.Base.append",
"LowParse.Low.Base.Spec.contents",
"Prims.bool",
"LowParse.Low.Base.list_fold_left_gen",
"Prims.l_False",
"FStar.Ghost.hide",
"LowParse.Low.Base.Spec.valid_list_append",
"LowParse.Low.Base.Spec.valid_list_cons",
"FStar.HyperStack.ST.get"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h'
#push-options "--z3rlimit 16"
inline_for_extraction
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos
))
= let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h0 (Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@inline_let] let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos'
#pop-options
inline_for_extraction
let copy_strong'
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\ (
let clen = content_length p h src spos in
U32.v dpos + clen <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + clen) h /\
B.loc_disjoint (loc_slice_from src spos) (loc_slice_from_to dst dpos (dpos `U32.add` (U32.uint_to_t clen)))
)))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
))
= let spos' = j src spos in
copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= if (dst.len `U32.sub` dpos) `U32.lt` (spos' `U32.sub` spos)
then max_uint32
else copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(jmp: jumper p)
(src: slice rrel1 rel1)
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (content_length p h src spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos (get_valid_pos p h src spos)) (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= let spos' = jmp src spos in
copy_weak_with_length p src spos spos' dst dpos
(* fold_left on lists *)
module BF = LowStar.Buffer
#push-options "--z3rlimit 256 --fuel 1 --ifuel 1"
#restart-solver
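(* [list_fold_left_gen] is a generic, interruptible left fold over the
   consecutive valid representations of [p] stored in [sl] between [pos] and
   [pos']. The loop state (current position, continue flag, loop test) lives
   in stack-allocated pointers and the iteration is driven by [C.Loops.while].
   The user-supplied [body] either preserves the invariant [inv] relating the
   processed prefix and the remaining suffix, or interrupts the loop, in which
   case [post_interrupt] is established instead. *)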
inline_for_extraction
let list_fold_left_gen
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(post_interrupt: ((h: HS.mem) -> GTot Type0))
(post_interrupt_frame: (h: HS.mem) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
post_interrupt h
)) (ensures (post_interrupt h')))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
HST.Stack bool
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos1 /\
valid_pos p h0 sl pos1 pos2 /\
valid_list p h0 sl pos2 pos' /\
inv h (contents_list p h0 sl pos pos1) (contents p h0 sl pos1 :: contents_list p h0 sl pos2 pos') pos1
))
(ensures (fun h ctinue h' ->
B.modifies (Ghost.reveal l) h h' /\
(if ctinue then inv h' (contents_list p h0 sl pos pos1 `L.append` [contents p h0 sl pos1]) (contents_list p h0 sl pos2 pos') pos2 else post_interrupt h')
))
))
: HST.Stack bool
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h res h' ->
B.modifies (Ghost.reveal l) h h' /\
(if res then inv h' (contents_list p h sl pos pos') [] pos' else post_interrupt h')
))
= HST.push_frame ();
let h1 = HST.get () in
// B.fresh_frame_modifies h0 h1;
let bpos : BF.pointer U32.t = BF.alloca pos 1ul in
let bctinue : BF.pointer bool = BF.alloca true 1ul in
let btest: BF.pointer bool = BF.alloca (pos `U32.lt` pos') 1ul in
let h2 = HST.get () in
assert (B.modifies B.loc_none h0 h2);
let test_pre (h: HS.mem) : GTot Type0 =
B.live h bpos /\ B.live h bctinue /\ B.live h btest /\ (
let pos1 = Seq.index (B.as_seq h bpos) 0 in
let ctinue = Seq.index (B.as_seq h bctinue) 0 in
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
B.modifies (Ghost.reveal l `B.loc_union` B.loc_region_only true (HS.get_tip h1)) h2 h /\
Seq.index (B.as_seq h btest) 0 == ((U32.v (Seq.index (B.as_seq h bpos) 0) < U32.v pos') && Seq.index (B.as_seq h bctinue) 0) /\
(if ctinue then inv h (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 else post_interrupt h)
)
in
let test_post (cond: bool) (h: HS.mem) : GTot Type0 =
test_pre h /\
cond == Seq.index (B.as_seq h btest) 0
in
valid_list_nil p h0 sl pos;
inv_frame h0 [] (contents_list p h0 sl pos pos') pos h1;
inv_frame h1 [] (contents_list p h0 sl pos pos') pos h2;
[@inline_let]
let while_body () : HST.Stack unit
(requires (fun h -> test_post true h))
(ensures (fun _ _ h1 -> test_pre h1))
=
let h51 = HST.get () in
let pos1 = B.index bpos 0ul in
valid_list_cons_recip p h0 sl pos1 pos';
//assert (B.modifies (Ghost.reveal l `B.loc_union` B.loc_buffer bpos) h0 h51);
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos'));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos pos1));
valid_list_cons_recip p h51 sl pos1 pos';
let pos2 = j sl pos1 in
let h52 = HST.get () in
inv_frame h51 (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 h52;
B.modifies_only_not_unused_in (Ghost.reveal l) h0 h52;
let ctinue = body pos1 pos2 in
let h53 = HST.get () in
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos2 pos'));
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h1));
valid_pos_frame_strong p h0 sl pos1 pos2 (Ghost.reveal l) h53;
valid_list_snoc p h0 sl pos pos1;
B.upd bpos 0ul pos2;
B.upd bctinue 0ul ctinue;
B.upd btest 0ul ((pos2 `U32.lt` pos') && ctinue);
let h54 = HST.get () in
[@inline_let]
let _ =
if ctinue
then inv_frame h53 (contents_list p h0 sl pos pos2) (contents_list p h0 sl pos2 pos') pos2 h54
else post_interrupt_frame h53 h54
in
()
in
C.Loops.while
#test_pre
#test_post
(fun (_: unit) -> (
B.index btest 0ul) <: HST.Stack bool (requires (fun h -> test_pre h)) (ensures (fun h x h1 -> test_post x h1)))
while_body
;
valid_list_nil p h0 sl pos';
let res = B.index bctinue 0ul in
let h3 = HST.get () in
HST.pop_frame ();
let h4 = HST.get () in
//B.popped_modifies h3 h4;
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h3));
[@inline_let]
let _ =
if res
then inv_frame h3 (contents_list p h0 sl pos pos') [] pos' h4
else post_interrupt_frame h3 h4
in
res
#pop-options
//B.loc_includes_union_l (B.loc_all_regions_from false (HS.get_tip h1)) (Ghost.reveal l) (Ghost.reveal l)
//B.modifies_fresh_frame_popped h0 h1 (Ghost.reveal l) h3 h4
module G = FStar.Ghost
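(* [list_fold_left] is the non-interruptible fold: the [body] is run once per
   element and receives ghost views of the already-processed prefix [l1], the
   current element [x] and the remaining suffix [l2], and must preserve the
   invariant [inv]. *)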
inline_for_extraction
let list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
(l1: Ghost.erased (list t)) ->
(x: Ghost.erased t) ->
(l2: Ghost.erased (list t)) ->
HST.Stack unit
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\
U32.v pos <= U32.v pos1 /\ U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' == Ghost.reveal l1 `L.append` (Ghost.reveal x :: Ghost.reveal l2)
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (Ghost.reveal l1 `L.append` [contents p h0 sl pos1]) (Ghost.reveal l2) pos2
))
))
: HST.Stack unit
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (contents_list p h sl pos pos') [] pos' | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos': U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc {B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos')})
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame:
(h: HS.mem -> l1: list t -> l2: list t -> pos1: U32.t -> h': HS.mem
-> Lemma (requires (B.modifies (B.loc_unused_in h0) h h' /\ inv h l1 l2 pos1))
(ensures (inv h' l1 l2 pos1))))
(body:
(
pos1: U32.t ->
pos2: U32.t ->
l1: Ghost.erased (list t) ->
x: Ghost.erased t ->
l2: Ghost.erased (list t)
-> HST.Stack unit
(requires
(fun h ->
B.modifies (Ghost.reveal l) h0 h /\ valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\ U32.v pos <= U32.v pos1 /\
U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos')
(loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' ==
(Ghost.reveal l1)
`L.append`
(Ghost.reveal x :: Ghost.reveal l2)))
(ensures
(fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h'
((Ghost.reveal l1) `L.append` [contents p h0 sl pos1])
(Ghost.reveal l2)
pos2))))
: HST.Stack unit
(requires
(fun h ->
h == h0 /\ valid_list p h sl pos pos' /\ inv h [] (contents_list p h sl pos pos') pos))
(ensures
(fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\ inv h' (contents_list p h sl pos pos') [] pos')) | [] | LowParse.Low.Base.list_fold_left | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
p: LowParse.Spec.Base.parser k t ->
j: LowParse.Low.Base.jumper p ->
sl: LowParse.Slice.slice rrel rel ->
pos: FStar.UInt32.t ->
pos': FStar.UInt32.t ->
h0: FStar.Monotonic.HyperStack.mem ->
l:
FStar.Ghost.erased LowStar.Monotonic.Buffer.loc
{ LowStar.Monotonic.Buffer.loc_disjoint (FStar.Ghost.reveal l)
(LowParse.Slice.loc_slice_from_to sl pos pos') } ->
inv:
(_: FStar.Monotonic.HyperStack.mem -> _: Prims.list t -> _: Prims.list t -> _: FStar.UInt32.t
-> Prims.GTot Type0) ->
inv_frame:
(
h: FStar.Monotonic.HyperStack.mem ->
l1: Prims.list t ->
l2: Prims.list t ->
pos1: FStar.UInt32.t ->
h': FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies (LowStar.Monotonic.Buffer.loc_unused_in h0) h h' /\
inv h l1 l2 pos1) (ensures inv h' l1 l2 pos1)) ->
body:
(
pos1: FStar.UInt32.t ->
pos2: FStar.UInt32.t ->
l1: FStar.Ghost.erased (Prims.list t) ->
x: FStar.Ghost.erased t ->
l2: FStar.Ghost.erased (Prims.list t)
-> FStar.HyperStack.ST.Stack Prims.unit)
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 4,
"end_line": 1357,
"start_col": 1,
"start_line": 1332
} |
FStar.Pervasives.Lemma | val valid_weaken
(k1 #k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
: Lemma (requires (k1 `is_weaker_than` k2))
(ensures
((valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==>
(valid p2 h sl pos /\
valid_content_pos (weaken k1 p2)
h
sl
pos
(contents p2 h sl pos)
(get_valid_pos p2 h sl pos)))) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos | val valid_weaken
(k1 #k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
: Lemma (requires (k1 `is_weaker_than` k2))
(ensures
((valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==>
(valid p2 h sl pos /\
valid_content_pos (weaken k1 p2)
h
sl
pos
(contents p2 h sl pos)
(get_valid_pos p2 h sl pos))))
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel
#rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma (requires (k1 `is_weaker_than` k2))
(ensures
((valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==>
(valid p2 h sl pos /\
valid_content_pos (weaken k1 p2)
h
sl
pos
(contents p2 h sl pos)
(get_valid_pos p2 h sl pos)))) = | false | null | true | valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"lemma"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt32.t",
"LowParse.Low.Base.Spec.valid_facts",
"Prims.unit",
"LowParse.Spec.Base.weaken",
"LowParse.Spec.Base.is_weaker_than",
"Prims.squash",
"Prims.l_imp",
"Prims.l_or",
"LowParse.Low.Base.Spec.valid",
"Prims.l_and",
"LowParse.Low.Base.Spec.valid_content_pos",
"LowParse.Low.Base.Spec.contents",
"LowParse.Low.Base.Spec.get_valid_pos",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val valid_weaken
(k1 #k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
: Lemma (requires (k1 `is_weaker_than` k2))
(ensures
((valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==>
(valid p2 h sl pos /\
valid_content_pos (weaken k1 p2)
h
sl
pos
(contents p2 h sl pos)
(get_valid_pos p2 h sl pos)))) | [] | LowParse.Low.Base.valid_weaken | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
k1: LowParse.Spec.Base.parser_kind ->
p2: LowParse.Spec.Base.parser k2 t ->
h: FStar.Monotonic.HyperStack.mem ->
sl: LowParse.Slice.slice rrel rel ->
pos: FStar.UInt32.t
-> FStar.Pervasives.Lemma (requires LowParse.Spec.Base.is_weaker_than k1 k2)
(ensures
LowParse.Low.Base.Spec.valid (LowParse.Spec.Base.weaken k1 p2) h sl pos \/
LowParse.Low.Base.Spec.valid p2 h sl pos ==>
LowParse.Low.Base.Spec.valid p2 h sl pos /\
LowParse.Low.Base.Spec.valid_content_pos (LowParse.Spec.Base.weaken k1 p2)
h
sl
pos
(LowParse.Low.Base.Spec.contents p2 h sl pos)
(LowParse.Low.Base.Spec.get_valid_pos p2 h sl pos)) | {
"end_col": 25,
"end_line": 358,
"start_col": 2,
"start_line": 357
} |
FStar.HyperStack.ST.Stack | val list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos': U32.t)
: HST.Stack U32.t
(requires (fun h -> valid_list p h sl pos pos'))
(ensures
(fun h res h' ->
B.modifies B.loc_none h h' /\ U32.v res == L.length (contents_list p h sl pos pos'))) | [
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "BF"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v res == L.length (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
B.fresh_frame_modifies h0 h1;
let blen : BF.pointer U32.t = BF.alloca 0ul 1ul in
let h2 = HST.get () in
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer blen))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_buffer blen) h2 h /\
B.live h blen /\ (
let len = U32.v (Seq.index (B.as_seq h blen) 0) in
len <= U32.v pos1 /\ // necessary to prove that length computations do not overflow
len == L.length l1
))
(fun h l1 l2 pos1 h' ->
B.modifies_only_not_unused_in (B.loc_buffer blen) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
B.upd blen 0ul (B.index blen 0ul `U32.add` 1ul);
Classical.forall_intro_2 (list_length_append #t)
)
;
let len = B.index blen 0ul in
HST.pop_frame ();
len | val list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos': U32.t)
: HST.Stack U32.t
(requires (fun h -> valid_list p h sl pos pos'))
(ensures
(fun h res h' ->
B.modifies B.loc_none h h' /\ U32.v res == L.length (contents_list p h sl pos pos')))
let list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos': U32.t)
: HST.Stack U32.t
(requires (fun h -> valid_list p h sl pos pos'))
(ensures
(fun h res h' ->
B.modifies B.loc_none h h' /\ U32.v res == L.length (contents_list p h sl pos pos'))) = | true | null | false | let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
B.fresh_frame_modifies h0 h1;
let blen:BF.pointer U32.t = BF.alloca 0ul 1ul in
let h2 = HST.get () in
list_fold_left p j sl pos pos' h2 (Ghost.hide (B.loc_buffer blen))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_buffer blen) h2 h /\ B.live h blen /\
(let len = U32.v (Seq.index (B.as_seq h blen) 0) in
len <= U32.v pos1 /\ len == L.length l1))
(fun h l1 l2 pos1 h' ->
B.modifies_only_not_unused_in (B.loc_buffer blen) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2)
(fun pos1 pos2 l1 x l2 ->
B.upd blen 0ul ((B.index blen 0ul) `U32.add` 1ul);
Classical.forall_intro_2 (list_length_append #t));
let len = B.index blen 0ul in
HST.pop_frame ();
len | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [] | [
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Low.Base.jumper",
"LowParse.Slice.slice",
"FStar.UInt32.t",
"Prims.unit",
"FStar.HyperStack.ST.pop_frame",
"LowStar.Monotonic.Buffer.index",
"LowStar.Buffer.trivial_preorder",
"FStar.UInt32.__uint_to_t",
"LowParse.Low.Base.list_fold_left",
"FStar.Ghost.hide",
"LowStar.Monotonic.Buffer.loc",
"LowStar.Monotonic.Buffer.loc_buffer",
"FStar.Monotonic.HyperStack.mem",
"Prims.list",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.live",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.UInt32.v",
"Prims.eq2",
"Prims.int",
"Prims.l_or",
"FStar.UInt.size",
"FStar.UInt32.n",
"Prims.op_GreaterThanOrEqual",
"FStar.List.Tot.Base.length",
"FStar.UInt.uint_t",
"FStar.Seq.Base.index",
"LowStar.Monotonic.Buffer.as_seq",
"LowStar.Monotonic.Buffer.loc_unused_in_not_unused_in_disjoint",
"LowStar.Monotonic.Buffer.modifies_only_not_unused_in",
"FStar.Ghost.erased",
"FStar.Classical.forall_intro_2",
"FStar.List.Tot.Base.append",
"Prims.op_Addition",
"LowParse.Low.Base.Spec.list_length_append",
"LowStar.Monotonic.Buffer.upd",
"FStar.UInt32.add",
"FStar.HyperStack.ST.get",
"LowStar.Buffer.pointer",
"LowStar.Buffer.alloca",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"LowStar.Monotonic.Buffer.fresh_frame_modifies",
"FStar.HyperStack.ST.push_frame",
"LowParse.Low.Base.Spec.valid_list",
"LowStar.Monotonic.Buffer.loc_none",
"LowParse.Low.Base.Spec.contents_list"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
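(* [jump_serializer]: when the bytes of [sl] at [pos] are known to start with
   [serialize s x], this re-establishes validity of [p] at [pos] (via
   [serialize_valid_exact] and [valid_exact_valid]) and then uses the jumper
   to return the position just past the serialized bytes. *)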
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
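(* [writable b pos pos' h] states that the preorder [rel] of [b] puts no
   constraint on the bytes in [pos .. pos'): any two replacements of that
   subsequence are related by [rel]. This is the precondition under which the
   writers below are allowed to overwrite those bytes. *)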
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
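(* [mbuffer_upd] updates a single cell of [b] inside the (ghost) writable
   range [pos .. pos'), keeping that range writable afterwards and reporting
   only [i .. i+1) as modified. *)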
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
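(* [frame_serializer32] runs a [serializer32] inside an enclosing writable
   frame [posl .. posr): it reports the whole frame as modified, but proves
   that the bytes of the frame outside [pos .. pos+len) are unchanged and that
   the frame remains writable. *)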
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h'
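(* [blit_strong] is [B.blit] followed by a lemma call that shrinks the reported
   footprint from the whole destination buffer to just [idx_dst, idx_dst + len).
   This tighter [modifies] clause is what lets [copy_strong] below frame the rest
   of the destination slice. *)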
#push-options "--z3rlimit 16"
inline_for_extraction
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos
))
= let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h0 (Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@inline_let] let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos'
#pop-options
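(* Illustrative sketch (assumes [spos, spos') delimits a valid value of the
   strong-kind parser [p] in [src] and that the preconditions on [dst] hold):
     let dpos' = copy_strong p src spos spos' dst dpos in
   afterwards [dst] holds the same value at [dpos], i.e.
   [valid_content_pos p h' dst dpos (contents p h src spos) dpos'], and
   [dpos' - dpos] equals [spos' - spos]. *)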
inline_for_extraction
let copy_strong'
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\ (
let clen = content_length p h src spos in
U32.v dpos + clen <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + clen) h /\
B.loc_disjoint (loc_slice_from src spos) (loc_slice_from_to dst dpos (dpos `U32.add` (U32.uint_to_t clen)))
)))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
))
= let spos' = j src spos in
copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= if (dst.len `U32.sub` dpos) `U32.lt` (spos' `U32.sub` spos)
then max_uint32
else copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(jmp: jumper p)
(src: slice rrel1 rel1)
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (content_length p h src spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos (get_valid_pos p h src spos)) (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= let spos' = jmp src spos in
copy_weak_with_length p src spos spos' dst dpos
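(* Illustrative sketch (assumes [j] is a jumper for [p] and the remaining
   preconditions hold): the weak copies report lack of room instead of requiring
   it up front.
     let dpos' = copy_weak p j src spos dst dpos in
     if dpos' = max_uint32
     then (* destination too small: handle the failure *) ...
     else (* the value is now valid in [dst] at [dpos .. dpos') *) ...
*)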
(* fold_left over the elements of a list parsed from a slice *)
module BF = LowStar.Buffer
#push-options "--z3rlimit 256 --fuel 1 --ifuel 1"
#restart-solver
inline_for_extraction
let list_fold_left_gen
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(post_interrupt: ((h: HS.mem) -> GTot Type0))
(post_interrupt_frame: (h: HS.mem) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
post_interrupt h
)) (ensures (post_interrupt h')))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
HST.Stack bool
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos1 /\
valid_pos p h0 sl pos1 pos2 /\
valid_list p h0 sl pos2 pos' /\
inv h (contents_list p h0 sl pos pos1) (contents p h0 sl pos1 :: contents_list p h0 sl pos2 pos') pos1
))
(ensures (fun h ctinue h' ->
B.modifies (Ghost.reveal l) h h' /\
(if ctinue then inv h' (contents_list p h0 sl pos pos1 `L.append` [contents p h0 sl pos1]) (contents_list p h0 sl pos2 pos') pos2 else post_interrupt h')
))
))
: HST.Stack bool
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h res h' ->
B.modifies (Ghost.reveal l) h h' /\
(if res then inv h' (contents_list p h sl pos pos') [] pos' else post_interrupt h')
))
= HST.push_frame ();
let h1 = HST.get () in
// B.fresh_frame_modifies h0 h1;
let bpos : BF.pointer U32.t = BF.alloca pos 1ul in
let bctinue : BF.pointer bool = BF.alloca true 1ul in
let btest: BF.pointer bool = BF.alloca (pos `U32.lt` pos') 1ul in
let h2 = HST.get () in
assert (B.modifies B.loc_none h0 h2);
let test_pre (h: HS.mem) : GTot Type0 =
B.live h bpos /\ B.live h bctinue /\ B.live h btest /\ (
let pos1 = Seq.index (B.as_seq h bpos) 0 in
let ctinue = Seq.index (B.as_seq h bctinue) 0 in
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
B.modifies (Ghost.reveal l `B.loc_union` B.loc_region_only true (HS.get_tip h1)) h2 h /\
Seq.index (B.as_seq h btest) 0 == ((U32.v (Seq.index (B.as_seq h bpos) 0) < U32.v pos') && Seq.index (B.as_seq h bctinue) 0) /\
(if ctinue then inv h (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 else post_interrupt h)
)
in
let test_post (cond: bool) (h: HS.mem) : GTot Type0 =
test_pre h /\
cond == Seq.index (B.as_seq h btest) 0
in
valid_list_nil p h0 sl pos;
inv_frame h0 [] (contents_list p h0 sl pos pos') pos h1;
inv_frame h1 [] (contents_list p h0 sl pos pos') pos h2;
[@inline_let]
let while_body () : HST.Stack unit
(requires (fun h -> test_post true h))
(ensures (fun _ _ h1 -> test_pre h1))
=
let h51 = HST.get () in
let pos1 = B.index bpos 0ul in
valid_list_cons_recip p h0 sl pos1 pos';
//assert (B.modifies (Ghost.reveal l `B.loc_union` B.loc_buffer bpos) h0 h51);
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos'));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos pos1));
valid_list_cons_recip p h51 sl pos1 pos';
let pos2 = j sl pos1 in
let h52 = HST.get () in
inv_frame h51 (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 h52;
B.modifies_only_not_unused_in (Ghost.reveal l) h0 h52;
let ctinue = body pos1 pos2 in
let h53 = HST.get () in
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos2 pos'));
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h1));
valid_pos_frame_strong p h0 sl pos1 pos2 (Ghost.reveal l) h53;
valid_list_snoc p h0 sl pos pos1;
B.upd bpos 0ul pos2;
B.upd bctinue 0ul ctinue;
B.upd btest 0ul ((pos2 `U32.lt` pos') && ctinue);
let h54 = HST.get () in
[@inline_let]
let _ =
if ctinue
then inv_frame h53 (contents_list p h0 sl pos pos2) (contents_list p h0 sl pos2 pos') pos2 h54
else post_interrupt_frame h53 h54
in
()
in
C.Loops.while
#test_pre
#test_post
(fun (_: unit) -> (
B.index btest 0ul) <: HST.Stack bool (requires (fun h -> test_pre h)) (ensures (fun h x h1 -> test_post x h1)))
while_body
;
valid_list_nil p h0 sl pos';
let res = B.index bctinue 0ul in
let h3 = HST.get () in
HST.pop_frame ();
let h4 = HST.get () in
//B.popped_modifies h3 h4;
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h3));
[@inline_let]
let _ =
if res
then inv_frame h3 (contents_list p h0 sl pos pos') [] pos' h4
else post_interrupt_frame h3 h4
in
res
#pop-options
//B.loc_includes_union_l (B.loc_all_regions_from false (HS.get_tip h1)) (Ghost.reveal l) (Ghost.reveal l)
//B.modifies_fresh_frame_popped h0 h1 (Ghost.reveal l) h3 h4
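(* [list_fold_left_gen] threads an early-exit channel through the loop: [body] may
   return [false], in which case iteration stops and [post_interrupt] holds instead
   of the invariant. [list_fold_left] below instantiates [post_interrupt] with
   [False], so its body must always continue. *)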
module G = FStar.Ghost
inline_for_extraction
let list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
(l1: Ghost.erased (list t)) ->
(x: Ghost.erased t) ->
(l2: Ghost.erased (list t)) ->
HST.Stack unit
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\
U32.v pos <= U32.v pos1 /\ U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' == Ghost.reveal l1 `L.append` (Ghost.reveal x :: Ghost.reveal l2)
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (Ghost.reveal l1 `L.append` [contents p h0 sl pos1]) (Ghost.reveal l2) pos2
))
))
: HST.Stack unit
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (contents_list p h sl pos pos') [] pos'
))
= let _ = list_fold_left_gen
p
j
sl
pos pos'
h0
l
inv
inv_frame
(fun _ -> False)
(fun _ _ -> ())
(fun pos1 pos2 ->
let h = HST.get () in
valid_list_cons p h sl pos1 pos';
valid_list_append p h sl pos pos1 pos';
body
pos1
pos2
(Ghost.hide (contents_list p h sl pos pos1))
(Ghost.hide (contents p h sl pos1))
(Ghost.hide (contents_list p h sl pos2 pos'))
;
true
)
in
()
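(* Illustrative note: [list_fold_left] iterates over the values of a valid list
   stored in [sl] between [pos] and [pos'], threading a caller-chosen invariant
   [inv]. A typical client keeps a counter or accumulator in a local buffer and
   relates it to the prefix of the list processed so far; the [list_length]
   function declared next can be implemented in exactly this style. *)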
inline_for_extraction
let list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v res == L.length (contents_list p h sl pos pos') | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos': U32.t)
: HST.Stack U32.t
(requires (fun h -> valid_list p h sl pos pos'))
(ensures
(fun h res h' ->
B.modifies B.loc_none h h' /\ U32.v res == L.length (contents_list p h sl pos pos'))) | [] | LowParse.Low.Base.list_length | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
p: LowParse.Spec.Base.parser k t ->
j: LowParse.Low.Base.jumper p ->
sl: LowParse.Slice.slice rrel rel ->
pos: FStar.UInt32.t ->
pos': FStar.UInt32.t
-> FStar.HyperStack.ST.Stack FStar.UInt32.t | {
"end_col": 5,
"end_line": 1408,
"start_col": 1,
"start_line": 1376
} |
Prims.Tot | val jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit{k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U32.v sz})
: Tot (jumper (p ())) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz | val jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit{k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U32.v sz})
: Tot (jumper (p ()))
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit{k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U32.v sz})
: Tot (jumper (p ())) = | false | null | false | fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@@ inline_let ]let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"Prims.unit",
"LowParse.Spec.Base.parser",
"FStar.UInt32.t",
"Prims.l_and",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"Prims.nat",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low",
"Prims.int",
"Prims.l_or",
"Prims.b2t",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt.size",
"FStar.UInt32.n",
"FStar.UInt32.v",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt32.add",
"LowParse.Low.Base.Spec.valid_facts",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowParse.Low.Base.jumper"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
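(* A validator returns a [U64.t] rather than a position: results satisfying
   [is_success] encode the next position (recovered with [uint64_to_uint32]),
   while failures carry an error code from LowParse.Low.ErrorCode. Sketch of the
   usual calling pattern, for some validator [v] and a [U64.t] position [pos]:
     let res = v sl pos in
     if is_success res then (* continue at [uint64_to_uint32 res] *) ... else (* reject *) ...
*)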
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
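(* Illustrative sketch (the validator [validate_foo] is hypothetical): [validate]
   is the entry point on a raw buffer rather than a slice.
     let ok = validate validate_foo buf len in
     if ok then (* a valid value starts at offset 0 of [buf] *) ... else ...
*)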
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
}) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit{k.parser_kind_high == Some k.parser_kind_low /\ k.parser_kind_low == U32.v sz})
: Tot (jumper (p ())) | [] | LowParse.Low.Base.jump_constant_size' | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
p: (_: Prims.unit -> Prims.GTot (LowParse.Spec.Base.parser k t)) ->
sz: FStar.UInt32.t ->
u126:
u128:
Prims.unit
{ Mkparser_kind'?.parser_kind_high k ==
FStar.Pervasives.Native.Some (Mkparser_kind'?.parser_kind_low k) /\
Mkparser_kind'?.parser_kind_low k == FStar.UInt32.v sz }
-> LowParse.Low.Base.jumper (p ()) | {
"end_col": 18,
"end_line": 406,
"start_col": 2,
"start_line": 403
} |
Prims.Tot | val jump_weaken
(k1 #k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 {k1 `is_weaker_than` k2})
: Tot (jumper (weaken k1 p2)) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos | val jump_weaken
(k1 #k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 {k1 `is_weaker_than` k2})
: Tot (jumper (weaken k1 p2))
let jump_weaken
(k1 #k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 {k1 `is_weaker_than` k2})
: Tot (jumper (weaken k1 p2)) = | false | null | false | fun #rrel #rel sl pos ->
let h = HST.get () in
[@@ inline_let ]let _ = valid_weaken k1 p2 h sl pos in
v2 sl pos | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Low.Base.jumper",
"LowParse.Spec.Base.is_weaker_than",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.UInt32.t",
"Prims.unit",
"LowParse.Low.Base.valid_weaken",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowParse.Spec.Base.weaken"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } ) | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val jump_weaken
(k1 #k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 {k1 `is_weaker_than` k2})
: Tot (jumper (weaken k1 p2)) | [] | LowParse.Low.Base.jump_weaken | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
k1: LowParse.Spec.Base.parser_kind ->
v2: LowParse.Low.Base.jumper p2 {LowParse.Spec.Base.is_weaker_than k1 k2}
-> LowParse.Low.Base.jumper (LowParse.Spec.Base.weaken k1 p2) | {
"end_col": 11,
"end_line": 434,
"start_col": 2,
"start_line": 429
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "BF"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let compl_t (t: Type) = U32.t -> t -> U32.t -> Tot (B.spred byte) | let compl_t (t: Type) = | false | null | false | U32.t -> t -> U32.t -> Tot (B.spred byte) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [
"total"
] | [
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.spred",
"LowParse.Bytes.byte"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
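(* jump_serializer runs a jumper on a buffer that merely starts with the
   serialization of the ghost value [x]: serialize_valid_exact and
   valid_exact_valid rebuild the validity hypothesis the jumper needs, and
   the strong subkind makes the bytes following the serialized prefix
   irrelevant. *)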
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
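(* [writable b pos pos' h] states that the buffer's preorder [rel] places no
   constraint on the contents of [pos, pos'): any two ways of overwriting
   that range yield rel-related sequences, so writers may store arbitrary
   bytes there. With a trivial preorder every live range is writable; with a
   restrictive preorder (e.g. one that freezes bytes once written),
   writability has to be established and propagated explicitly, which is the
   purpose of the writable_* lemmas below. *)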
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
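(* mbuffer_upd packages B.upd' with the lemmas needed to perform a single-cell
   update under a writability hypothesis: writable_upd shows the update is
   allowed by the preorder and that [pos, pos') stays writable, and
   g_upd_modifies_strong restricts the modifies clause to the one cell
   [i, i+1). *)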
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
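(* Three flavours of low-level writers are used below:
   - leaf_writer_weak: writes into a slice, checks the available space at run
     time and returns max_uint32 on overflow;
   - leaf_writer_strong: the caller proves beforehand that serialized_length
     s x bytes fit, so the write cannot fail;
   - serializer32: writes into a raw buffer and returns the number of bytes
     written rather than an end position.
   All three require writability of the range they touch. *)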
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
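(* frame_serializer32 runs a serializer32 in the middle of a larger writable
   range [posl, posr): writability is first weakened to the bytes actually
   written, and the loc_buffer_from_to lemmas then show that the bytes before
   pos and after pos + len are unchanged and that the whole [posl, posr)
   range remains writable, so several writes can be chained within the same
   frame. *)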
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
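(* The three writer representations are interconvertible in the common cases:
   a serializer32 for a strong parser yields a strong leaf writer
   (leaf_writer_strong_of_serializer32, via parse_strong_prefix); a strong
   writer for a constant-size format yields a weak one by a run-time space
   check (leaf_writer_weak_of_strong_constant_size); and a strong writer for
   a constant-size strong format yields a serializer32 by viewing the target
   buffer as a slice of length pos + sz
   (serializer32_of_leaf_writer_strong_constant_size). *)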
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h'
#push-options "--z3rlimit 16"
inline_for_extraction
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos
))
= let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h0 (Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@inline_let] let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos'
#pop-options
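(* copy_strong copies the byte representation of a valid value from [src] to
   [dst] with blit_strong and re-establishes validity on the destination with
   parse_strong_prefix: since the parser has the strong subkind, the copied
   bytes parse to the same value regardless of what follows them in [dst].
   The caller supplies disjointness of the two ranges and writability of the
   destination range. *)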
inline_for_extraction
let copy_strong'
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\ (
let clen = content_length p h src spos in
U32.v dpos + clen <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + clen) h /\
B.loc_disjoint (loc_slice_from src spos) (loc_slice_from_to dst dpos (dpos `U32.add` (U32.uint_to_t clen)))
)))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
))
= let spos' = j src spos in
copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= if (dst.len `U32.sub` dpos) `U32.lt` (spos' `U32.sub` spos)
then max_uint32
else copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(jmp: jumper p)
(src: slice rrel1 rel1)
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (content_length p h src spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos (get_valid_pos p h src spos)) (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= let spos' = jmp src spos in
copy_weak_with_length p src spos spos' dst dpos
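(* The weak copy variants check at run time whether the destination slice has
   enough room and return max_uint32 on overflow, mirroring leaf_writer_weak;
   copy_weak additionally uses a jumper to discover the end position of the
   source value instead of taking it as an argument. *)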
(* fold_left on lists *)
module BF = LowStar.Buffer
#push-options "--z3rlimit 256 --fuel 1 --ifuel 1"
#restart-solver
inline_for_extraction
let list_fold_left_gen
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(post_interrupt: ((h: HS.mem) -> GTot Type0))
(post_interrupt_frame: (h: HS.mem) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
post_interrupt h
)) (ensures (post_interrupt h')))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
HST.Stack bool
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos1 /\
valid_pos p h0 sl pos1 pos2 /\
valid_list p h0 sl pos2 pos' /\
inv h (contents_list p h0 sl pos pos1) (contents p h0 sl pos1 :: contents_list p h0 sl pos2 pos') pos1
))
(ensures (fun h ctinue h' ->
B.modifies (Ghost.reveal l) h h' /\
(if ctinue then inv h' (contents_list p h0 sl pos pos1 `L.append` [contents p h0 sl pos1]) (contents_list p h0 sl pos2 pos') pos2 else post_interrupt h')
))
))
: HST.Stack bool
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h res h' ->
B.modifies (Ghost.reveal l) h h' /\
(if res then inv h' (contents_list p h sl pos pos') [] pos' else post_interrupt h')
))
= HST.push_frame ();
let h1 = HST.get () in
// B.fresh_frame_modifies h0 h1;
let bpos : BF.pointer U32.t = BF.alloca pos 1ul in
let bctinue : BF.pointer bool = BF.alloca true 1ul in
let btest: BF.pointer bool = BF.alloca (pos `U32.lt` pos') 1ul in
let h2 = HST.get () in
assert (B.modifies B.loc_none h0 h2);
let test_pre (h: HS.mem) : GTot Type0 =
B.live h bpos /\ B.live h bctinue /\ B.live h btest /\ (
let pos1 = Seq.index (B.as_seq h bpos) 0 in
let ctinue = Seq.index (B.as_seq h bctinue) 0 in
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
B.modifies (Ghost.reveal l `B.loc_union` B.loc_region_only true (HS.get_tip h1)) h2 h /\
Seq.index (B.as_seq h btest) 0 == ((U32.v (Seq.index (B.as_seq h bpos) 0) < U32.v pos') && Seq.index (B.as_seq h bctinue) 0) /\
(if ctinue then inv h (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 else post_interrupt h)
)
in
let test_post (cond: bool) (h: HS.mem) : GTot Type0 =
test_pre h /\
cond == Seq.index (B.as_seq h btest) 0
in
valid_list_nil p h0 sl pos;
inv_frame h0 [] (contents_list p h0 sl pos pos') pos h1;
inv_frame h1 [] (contents_list p h0 sl pos pos') pos h2;
[@inline_let]
let while_body () : HST.Stack unit
(requires (fun h -> test_post true h))
(ensures (fun _ _ h1 -> test_pre h1))
=
let h51 = HST.get () in
let pos1 = B.index bpos 0ul in
valid_list_cons_recip p h0 sl pos1 pos';
//assert (B.modifies (Ghost.reveal l `B.loc_union` B.loc_buffer bpos) h0 h51);
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos'));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos pos1));
valid_list_cons_recip p h51 sl pos1 pos';
let pos2 = j sl pos1 in
let h52 = HST.get () in
inv_frame h51 (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 h52;
B.modifies_only_not_unused_in (Ghost.reveal l) h0 h52;
let ctinue = body pos1 pos2 in
let h53 = HST.get () in
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos2 pos'));
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h1));
valid_pos_frame_strong p h0 sl pos1 pos2 (Ghost.reveal l) h53;
valid_list_snoc p h0 sl pos pos1;
B.upd bpos 0ul pos2;
B.upd bctinue 0ul ctinue;
B.upd btest 0ul ((pos2 `U32.lt` pos') && ctinue);
let h54 = HST.get () in
[@inline_let]
let _ =
if ctinue
then inv_frame h53 (contents_list p h0 sl pos pos2) (contents_list p h0 sl pos2 pos') pos2 h54
else post_interrupt_frame h53 h54
in
()
in
C.Loops.while
#test_pre
#test_post
(fun (_: unit) -> (
B.index btest 0ul) <: HST.Stack bool (requires (fun h -> test_pre h)) (ensures (fun h x h1 -> test_post x h1)))
while_body
;
valid_list_nil p h0 sl pos';
let res = B.index bctinue 0ul in
let h3 = HST.get () in
HST.pop_frame ();
let h4 = HST.get () in
//B.popped_modifies h3 h4;
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h3));
[@inline_let]
let _ =
if res
then inv_frame h3 (contents_list p h0 sl pos pos') [] pos' h4
else post_interrupt_frame h3 h4
in
res
#pop-options
//B.loc_includes_union_l (B.loc_all_regions_from false (HS.get_tip h1)) (Ghost.reveal l) (Ghost.reveal l)
//B.modifies_fresh_frame_popped h0 h1 (Ghost.reveal l) h3 h4
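(* list_fold_left_gen is a general imperative fold over a list of values laid
   out consecutively in [pos, pos'). The loop state lives in three
   stack-allocated cells (current position, continue flag, loop test); [inv]
   relates the visited prefix, the remaining suffix and the current position;
   [inv_frame] and [post_interrupt_frame] show stability under modifications
   of memory unused in h0, which is needed because of the fresh stack frame;
   and [body] may interrupt the fold by returning false, in which case
   [post_interrupt] holds instead of the invariant. list_length below is a
   minimal instantiation; list_nth and list_find use the interruption
   mechanism to stop early. *)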
module G = FStar.Ghost
inline_for_extraction
let list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
(l1: Ghost.erased (list t)) ->
(x: Ghost.erased t) ->
(l2: Ghost.erased (list t)) ->
HST.Stack unit
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\
U32.v pos <= U32.v pos1 /\ U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' == Ghost.reveal l1 `L.append` (Ghost.reveal x :: Ghost.reveal l2)
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (Ghost.reveal l1 `L.append` [contents p h0 sl pos1]) (Ghost.reveal l2) pos2
))
))
: HST.Stack unit
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (contents_list p h sl pos pos') [] pos'
))
= let _ = list_fold_left_gen
p
j
sl
pos pos'
h0
l
inv
inv_frame
(fun _ -> False)
(fun _ _ -> ())
(fun pos1 pos2 ->
let h = HST.get () in
valid_list_cons p h sl pos1 pos';
valid_list_append p h sl pos pos1 pos';
body
pos1
pos2
(Ghost.hide (contents_list p h sl pos pos1))
(Ghost.hide (contents p h sl pos1))
(Ghost.hide (contents_list p h sl pos2 pos'))
;
true
)
in
()
inline_for_extraction
let list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v res == L.length (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
B.fresh_frame_modifies h0 h1;
let blen : BF.pointer U32.t = BF.alloca 0ul 1ul in
let h2 = HST.get () in
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer blen))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_buffer blen) h2 h /\
B.live h blen /\ (
let len = U32.v (Seq.index (B.as_seq h blen) 0) in
len <= U32.v pos1 /\ // necessary to prove that length computations do not overflow
len == L.length l1
))
(fun h l1 l2 pos1 h' ->
B.modifies_only_not_unused_in (B.loc_buffer blen) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
B.upd blen 0ul (B.index blen 0ul `U32.add` 1ul);
Classical.forall_intro_2 (list_length_append #t)
)
;
let len = B.index blen 0ul in
HST.pop_frame ();
len
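(* list_length measures a serialized list by folding over it with a
   stack-allocated U32 counter. The invariant keeps the count below the
   current position, which (as the inline comment notes) is what rules out
   overflow of the counter. Illustrative use, with a hypothetical parser
   [p32] and jumper [j32]:
     let n = list_length p32 j32 sl pos pos' in ...
*)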
#push-options "--z3rlimit 32 --fuel 2 --ifuel 1"
inline_for_extraction
let list_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(f: (t -> Tot bool))
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
(x: Ghost.erased t) ->
HST.Stack bool
(requires (fun h -> valid_content p h sl pos (G.reveal x)))
(ensures (fun h res h' -> B.modifies B.loc_none h h' /\ res == f (G.reveal x)))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
(#rrel_out #rel_out: _)
(sl_out : slice rrel_out rel_out)
(pos_out : U32.t)
: HST.Stack U32.t
(requires (fun h ->
U32.v pos_out + U32.v pos' - U32.v pos <= U32.v sl_out.len /\
valid_list p h sl pos pos' /\
B.loc_disjoint (loc_slice_from_to sl pos pos') (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
live_slice h sl_out
))
(ensures (fun h pos_out' h' ->
B.modifies (loc_slice_from_to sl_out pos_out pos_out') h h' /\
U32.v pos_out' - U32.v pos_out <= U32.v pos' - U32.v pos /\
valid_list p h' sl_out pos_out pos_out' /\
contents_list p h' sl_out pos_out pos_out' == L.filter f (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
//B.fresh_frame_modifies h0 h1;
let bpos_out' : BF.pointer U32.t = BF.alloca pos_out 1ul in
let h2 = HST.get () in
let inv (h: HS.mem) (l1 l2: list t) (pos1: U32.t) : GTot Type0 =
B.live h bpos_out' /\ (
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
valid_list p h sl_out pos_out pos_out' /\
contents_list p h sl_out pos_out pos_out' == L.filter f l1 /\
U32.v pos_out' - U32.v pos1 <= U32.v pos_out - U32.v pos // necessary to prove that length computations do not overflow
)
in
valid_list_nil p h2 sl_out pos_out;
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))))
inv
(fun h l1 l2 pos1 h' ->
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies_only_not_unused_in (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
let pos_out1 = B.index bpos_out' 0ul in
list_filter_append f (G.reveal l1) [G.reveal x];
if f' sl pos1 x
then begin
assert (B.loc_includes (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) (loc_slice_from_to sl_out pos_out1 (pos_out1 `U32.add` (pos2 `U32.sub` pos1))));
let h = HST.get () in
writable_weaken sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (U32.v pos_out1) (U32.v pos_out1 + (U32.v pos2 - U32.v pos1));
let pos_out2 = copy_strong p sl pos1 pos2 sl_out pos_out1 in
B.upd bpos_out' 0ul pos_out2;
let h' = HST.get () in
writable_modifies sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (B.loc_region_only true (HS.get_tip h1)) h';
valid_list_nil p h' sl_out pos_out2;
valid_list_cons p h' sl_out pos_out1 pos_out2;
valid_list_append p h' sl_out pos_out pos_out1 pos_out2
end else
L.append_l_nil (L.filter f (G.reveal l1))
)
;
let pos_out' = B.index bpos_out' 0ul in
HST.pop_frame ();
pos_out'
#pop-options
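(* list_filter copies into [sl_out] exactly those elements of the input list
   that satisfy [f], using copy_strong element by element. [f] is the pure
   specification-level predicate and [f'] its executable counterpart; the
   output position is kept in a stack-allocated cell, and the invariant ties
   the bytes written so far to L.filter f over the prefix already visited,
   so the output range can only be shorter than the input range. *)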
#push-options "--z3rlimit 64 --fuel 2 --ifuel 1"
inline_for_extraction
let list_nth
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(i: U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos' /\
U32.v i < L.length (contents_list p h sl pos pos')
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
valid_list p h sl pos res /\
valid p h sl res /\
valid_list p h sl (get_valid_pos p h sl res) pos' /\
L.length (contents_list p h sl pos res) == U32.v i /\
contents p h sl res == L.index (contents_list p h sl pos pos') (U32.v i)
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos1 = BF.alloca pos 1ul in
let bk = BF.alloca 0ul 1ul in
let h2 = HST.get () in
valid_list_nil p h0 sl pos;
let _ : bool = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
let k = Seq.index (B.as_seq h bk) 0 in
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bpos1 /\
B.live h bk /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v k /\
U32.v k <= U32.v i
)
(fun h _ _ _ h' ->
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bpos1);
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bk);
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
let pos1 = Seq.index (B.as_seq h bpos1) 0 in
B.live h bpos1 /\
valid p h0 sl pos1 /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl (get_valid_pos p h0 sl pos1) pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v i /\
contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i)
)
(fun _ _ ->
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 ->
let k = B.index bk 0ul in
if k = i
then begin
B.upd bpos1 0ul pos1;
valid_list_cons_recip p h0 sl pos1 pos';
list_index_append (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') (U32.v i);
valid_list_append p h0 sl pos pos1 pos' ;
assert (contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i));
false
end else begin
B.upd bk 0ul (k `U32.add` 1ul);
let h = HST.get () in
B.modifies_only_not_unused_in B.loc_none h0 h;
valid_list_snoc p h0 sl pos pos1;
assert (valid p h0 sl pos1);
assert (pos2 == get_valid_pos p h0 sl pos1);
assert (valid_list p h0 sl pos pos2);
list_length_append (contents_list p h0 sl pos pos1) [contents p h0 sl pos1];
true
end
)
in
let res = B.index bpos1 0ul in
HST.pop_frame ();
res
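(* list_nth counts elements in a stack-allocated cell and interrupts the fold
   when the count reaches [i]; the post_interrupt predicate then characterises
   the stored position as the start of the i-th element, with a prefix of
   length i and contents equal to L.index of the whole list at i. *)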
inline_for_extraction
let list_find
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let l = contents_list p h sl pos pos' in
if res = pos'
then L.find f l == None
else
U32.v pos <= U32.v res /\
valid p h sl res /\ (
let x = contents p h sl res in
U32.v res + content_length p h sl res <= U32.v pos' /\
f x == true /\
L.find f l == Some x
)
)))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bres = BF.alloca 0ul 1ul in
let h2 = HST.get () in
let not_found = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\
valid_list p h0 sl pos1 pos' /\
l2 == contents_list p h0 sl pos1 pos' /\
L.find f (contents_list p h0 sl pos pos') == L.find f l2
)
(fun h _ _ _ h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\ (
let res = Seq.index (B.as_seq h bres) 0 in
U32.v pos <= U32.v res /\
valid p h0 sl res /\ (
let x = contents p h0 sl res in
U32.v res + content_length p h0 sl res <= U32.v pos' /\
f x == true /\
L.find f (contents_list p h0 sl pos pos') == Some x
)))
(fun h h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun pos1 pos2 ->
if f' sl pos1
then begin
B.upd bres 0ul pos1;
false
end
else true
)
in
let res =
if not_found
then pos'
else B.index bres 0ul
in
HST.pop_frame ();
res
#pop-options
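(* list_find returns the position of the first element satisfying [f], or
   [pos'] itself when there is none. The fold is interrupted as soon as [f']
   answers true; the invariant keeps L.find f of the remaining suffix equal
   to L.find f of the whole list, which justifies the early exit. *)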
let rec list_existsb_find
(#a: Type)
(f: (a -> Tot bool))
(l: list a)
: Lemma
(L.existsb f l == Some? (L.find f l))
= match l with
| [] -> ()
| x :: q ->
if f x
then ()
else list_existsb_find f q
inline_for_extraction
noextract
let list_existsb
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack bool
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == L.existsb f (contents_list p h sl pos pos')
))
= let h = HST.get () in
list_existsb_find f (contents_list p h sl pos pos');
let posn = list_find j f f' sl pos pos' in
posn <> pos'
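(* list_existsb reduces to list_find through list_existsb_find: an element
   satisfying [f] exists exactly when L.find returns Some, i.e. exactly when
   the position returned by list_find differs from [pos']. *)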
#push-options "--fuel 2 --ifuel 1 --z3rlimit 256 --query_stats"
inline_for_extraction
noextract
let list_flatten_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot (list t2))) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2_ res /\
contents_list p2 h' sl2 pos2_ res == y
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1')) in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= let hz = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos2_ = BF.alloca pos2 1ul in
let h2 = HST.get () in
valid_list_nil p2 hz sl2 pos2;
let fits = list_fold_left_gen
p1
j1
sl1
pos1 pos1'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2))
(fun h ll lr _ ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
B.live h bpos2_ /\ (
let pos2_ = Seq.index (B.as_seq h bpos2_) 0 in
contents_list p1 h0 sl1 pos1 pos1' == ll `List.Tot.append` lr /\
valid_list p2 h sl2 pos2 pos2_ /\
contents_list p2 h sl2 pos2 pos2_ == List.Tot.flatten (List.Tot.map f ll) /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(fun h _ _ _ h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
U32.v pos2 + serialized_list_length s2 (List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1'))) > U32.v sl2.len
)
(fun h h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1l pos1r ->
let pos2_ = B.index bpos2_ 0ul in
let h = HST.get () in
writable_weaken sl2.base (U32.v pos2) (U32.v sl2.len) h (U32.v pos2_) (U32.v sl2.len);
valid_pos_frame_strong p1 h0 sl1 pos1l pos1r (loc_slice_from sl2 pos2) hz;
let res = f' pos1l pos2_ in
let fits = not (res = max_uint32) in
if fits then begin
B.upd bpos2_ 0ul res;
let h' = HST.get () in
writable_modifies sl2.base (U32.v pos2) (U32.v sl2.len) h (B.loc_region_only true (HS.get_tip h1)) h' ;
List.Tot.append_assoc (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l] (contents_list p1 h0 sl1 pos1r pos1');
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l];
valid_list_snoc p1 h0 sl1 pos1 pos1l;
valid_list_append p2 h' sl2 pos2 pos2_ res;
valid_list_nil p2 h' sl2 res;
valid_list_append p2 h' sl2 pos2_ res res
end else begin
let h' = HST.get () in
valid_list_cons p1 h0 sl1 pos1l pos1' ;
valid_list_append p1 h0 sl1 pos1 pos1l pos1' ;
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) (contents_list p1 h0 sl1 pos1l pos1');
serialized_list_length_append s2 (L.flatten (L.map f (contents_list p1 h0 sl1 pos1 pos1l))) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1l pos1')));
serialized_list_length_append s2 (f (contents p1 h0 sl1 pos1l)) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1r pos1')));
valid_list_serialized_list_length s2 h' sl2 pos2 pos2_
end;
fits
)
in
let res =
if fits
then B.index bpos2_ 0ul
else max_uint32
in
HST.pop_frame ();
res
#pop-options
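(* list_flatten_map writes, for each element x of the input list, the
   serialized list [f x] into [sl2], producing the serialization of
   List.Tot.flatten (List.Tot.map f ...). The interruption branch of the fold
   handles overflow: when the per-element writer [f'] reports that [f x] does
   not fit, the serialized_list_length lemmas show that the whole flattened
   output would not fit either, which is exactly what the max_uint32
   postcondition demands. *)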
#push-options "--z3rlimit 16 --query_stats"
inline_for_extraction
noextract
let list_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot t2)) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_length s2 y > U32.v sl2.len
else
valid_content_pos p2 h' sl2 pos2_ y res
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1') in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= list_map_list_flatten_map f (contents_list p1 h0 sl1 pos1 pos1');
list_flatten_map
j1
s2
(fun x -> [f x])
h0
sl1 pos1 pos1'
sl2 pos2
(fun pos1 pos2 ->
let res = f' pos1 pos2 in
let h = HST.get () in
if res = max_uint32
then begin
serialized_list_length_nil s2;
serialized_list_length_cons s2 (f (contents p1 h0 sl1 pos1)) []
end
else begin
valid_list_nil p2 h sl2 res;
valid_list_cons p2 h sl2 pos2 res
end;
res
)
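(* list_map is list_flatten_map instantiated with singleton lists
   (fun x -> [f x]); the wrapper around [f'] supplies the valid_list and
   serialized_list_length facts needed to view a single serialized element as
   a one-element list. *)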
(* Example: trivial printers *)
inline_for_extraction
let print_list
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(print: ((#rrel: _) -> (#rel: _) -> (sl: slice rrel rel) -> (pos: U32.t) -> HST.Stack unit (requires (fun h -> valid p h sl pos)) (ensures (fun h _ h' -> B.modifies B.loc_none h h'))))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack unit
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h _ h' ->
B.modifies B.loc_none h h'
))
= let h0 = HST.get () in
list_fold_left
p
j
sl
pos pos'
h0
(Ghost.hide B.loc_none)
(fun _ _ _ _ -> True)
(fun _ _ _ _ _ -> ())
(fun pos1 _ _ _ _ ->
print sl pos1
)
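(* print_list is the simplest client of list_fold_left: the invariant is
   trivially True, the footprint is loc_none, and the body just runs the
   per-element printer. A hypothetical printer for 32-bit values could be
   plugged in as [print_list j32 print_u32 sl pos pos'], assuming [print_u32]
   reads the value at the given position and prints it without modifying
   memory. *)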
(* Monotonicity *) | false | true | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val compl_t : t: Type -> Type | [] | LowParse.Low.Base.compl_t | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | t: Type -> Type | {
"end_col": 65,
"end_line": 2008,
"start_col": 24,
"start_line": 2008
} |
|
FStar.HyperStack.ST.Stack | val copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1)
(spos spos': U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires
(fun h ->
k.parser_kind_subkind == Some ParserStrong /\ valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\ live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos')
(loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))))
(ensures
(fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos)) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos
))
= let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h0 (Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@inline_let] let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos' | val copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1)
(spos spos': U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires
(fun h ->
k.parser_kind_subkind == Some ParserStrong /\ valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\ live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos')
(loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))))
(ensures
(fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos))
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1)
(spos spos': U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires
(fun h ->
k.parser_kind_subkind == Some ParserStrong /\ valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\ live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos')
(loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))))
(ensures
(fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos)) = | true | null | false | let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base
(U32.v dpos)
(U32.v dpos + (U32.v spos' - U32.v spos))
h0
(Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@@ inline_let ]let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos' | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [] | [
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Slice.slice",
"FStar.UInt32.t",
"Prims.unit",
"LowParse.Low.Base.Spec.valid_facts",
"LowParse.Spec.Base.parse_strong_prefix",
"LowParse.Slice.bytes_of_slice_from",
"FStar.UInt32.add",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowParse.Low.Base.blit_strong",
"LowParse.Slice.buffer_srel_of_srel",
"LowParse.Slice.__proj__Mkslice__item__base",
"LowParse.Low.Base.writable_replace_subseq_elim",
"FStar.UInt32.v",
"Prims.op_Addition",
"Prims.op_Subtraction",
"FStar.Seq.Base.slice",
"LowStar.Monotonic.Buffer.as_seq",
"FStar.UInt32.sub",
"Prims.l_and",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"LowParse.Low.Base.Spec.valid_pos",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"LowParse.Slice.__proj__Mkslice__item__len",
"LowParse.Slice.live_slice",
"LowParse.Low.Base.writable",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowParse.Slice.loc_slice_from_to",
"LowStar.Monotonic.Buffer.modifies",
"LowParse.Low.Base.Spec.valid_content_pos",
"LowParse.Low.Base.Spec.contents"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
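(* A [validator p] checks at run time whether [p] parses successfully at [pos]:
   on success it returns the end position of the parsed value, otherwise an error
   code (cf. [is_success]). *)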
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
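(* Variant of [validator] whose slice argument is ghost: only the slice length is
   needed at run time, so such a validator never reads the input bytes. *)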
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
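(* A [jumper p] assumes that [p] is valid at [pos] and returns the end position of
   the parsed value, i.e. [pos] plus its [content_length]. *)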
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
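(* [slong `seq_starts_with` sshort] holds iff [sshort] is a prefix of [slong]. *)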
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
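(* [writable b pos pos' h] states that the preorder [rel] of [b] relates any two
   states that differ only in the range [pos, pos'), so that range may be
   overwritten with arbitrary contents (as serializers do). *)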
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
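(* A weak leaf writer checks for space at run time and returns [max_uint32] when
   the serialized value does not fit in the remaining slice. *)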
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
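(* A strong leaf writer requires the caller to prove that the serialized value fits,
   and always returns the position right after the written value. *)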
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
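(* A [serializer32 s] writes the serialization of [x] into a raw buffer at [pos]
   and returns the number of bytes written. *)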
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
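(* Frames a [serializer32] call within [posl, posr): the bytes of that range lying
   outside the freshly written area are unchanged, and writability of the whole
   range is preserved. *)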
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
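(* Like [B.blit], but with a postcondition reporting modification of only the
   written destination range [idx_dst, idx_dst + len). *)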
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h'
#push-options "--z3rlimit 16"
inline_for_extraction
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos | false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1)
(spos spos': U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires
(fun h ->
k.parser_kind_subkind == Some ParserStrong /\ valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\ live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos')
(loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))))
(ensures
(fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos)) | [] | LowParse.Low.Base.copy_strong | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
p: LowParse.Spec.Base.parser k t ->
src: LowParse.Slice.slice rrel1 rel1 ->
spos: FStar.UInt32.t ->
spos': FStar.UInt32.t ->
dst: LowParse.Slice.slice rrel2 rel2 ->
dpos: FStar.UInt32.t
-> FStar.HyperStack.ST.Stack FStar.UInt32.t | {
"end_col": 7,
"end_line": 1046,
"start_col": 1,
"start_line": 1037
} |
Prims.Ghost | val irepr_pos'
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(x: irepr p s compl)
: Ghost U32.t (requires True) (ensures (fun y -> True)) | [
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "BF"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Int.Cast",
"short_module": "Cast"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.Math",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "LowParse.Low.ErrorCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Low",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let irepr_pos'
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : Ghost U32.t
(requires True)
(ensures (fun y -> True))
= Ghost.reveal (IRepr?.gpos' x) | val irepr_pos'
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(x: irepr p s compl)
: Ghost U32.t (requires True) (ensures (fun y -> True))
let irepr_pos'
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(x: irepr p s compl)
: Ghost U32.t (requires True) (ensures (fun y -> True)) = | false | null | false | Ghost.reveal (IRepr?.gpos' x) | {
"checked_file": "LowParse.Low.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.Comment.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Math.fst.checked",
"LowParse.Low.ErrorCode.fst.checked",
"LowParse.Low.Base.Spec.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Low.Base.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"LowParse.Low.Base.compl_t",
"LowParse.Low.Base.irepr",
"FStar.Ghost.reveal",
"FStar.UInt32.t",
"LowParse.Low.Base.__proj__IRepr__item__gpos'",
"Prims.l_True"
] | [] | module LowParse.Low.Base
include LowParse.Low.Base.Spec
include LowParse.Low.ErrorCode
module M = LowParse.Math
module B = LowStar.Monotonic.Buffer
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module Seq = FStar.Seq
module Cast = FStar.Int.Cast
module L = FStar.List.Tot
[@unifier_hint_injective]
inline_for_extraction
let accessor
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(g: gaccessor p1 p2 cl)
: Tot Type
= (#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
valid p1 h sl pos /\
cl.clens_cond (contents p1 h sl pos)
))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
pos' == slice_access h g sl pos
))
#push-options "--z3rlimit 16"
inline_for_extraction
let make_accessor_from_pure
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
($g: gaccessor p1 p2 cl)
(f: (
(input: Ghost.erased bytes) ->
Pure U32.t
(requires (Seq.length (Ghost.reveal input) < 4294967296 /\ gaccessor_pre p1 p2 cl (Ghost.reveal input)))
(ensures (fun y -> U32.v y == (g (Ghost.reveal input))))
))
: Tot (accessor g)
= fun #rrel #rel sl (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ =
slice_access_eq h g sl pos
in
pos `U32.add` f (Ghost.hide (bytes_of_slice_from h sl pos))
inline_for_extraction
let accessor_id
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot (accessor (gaccessor_id p))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let] let _ = slice_access_eq h (gaccessor_id p) input pos in
[@inline_let] let _ = gaccessor_id_eq p (bytes_of_slice_from h input pos) in
pos
#pop-options
inline_for_extraction
let accessor_ext
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl: clens t1 t2)
(#g: gaccessor p1 p2 cl)
(a: accessor g)
(cl': clens t1 t2)
(sq: squash (clens_eq cl cl'))
: Tot (accessor (gaccessor_ext g cl' sq))
= fun #rrel #rel input pos ->
let h = HST.get () in
[@inline_let]
let _ =
slice_access_eq h (gaccessor_ext g cl' sq) input pos;
slice_access_eq h g input pos;
gaccessor_ext_eq g cl' sq (bytes_of_slice_from h input pos)
in
a input pos
#push-options "--z3rlimit 128" // necessary for the .fst
#restart-solver // necessary for the .fst
inline_for_extraction
let accessor_compose
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23)
(sq: unit) // squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
let pos2 = a12' input pos in
let pos3 = a23' input pos2 in
slice_access_eq h a12 input pos;
slice_access_eq h a23 input pos2;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
gaccessor_compose_eq a12 a23 (bytes_of_slice_from h input pos);
pos3
#pop-options
(*
inline_for_extraction
let accessor_compose_strong
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(#cl12: clens t1 t2)
(#a12: gaccessor p1 p2 cl12)
(a12' : accessor a12)
(#k3: parser_kind)
(#t3: Type)
(#p3: parser k3 t3)
(#cl23: clens t2 t3)
(#a23: gaccessor p2 p3 cl23)
(a23' : accessor a23 { clens_compose_strong_pre cl12 cl23 } )
(sq: squash (k2.parser_kind_subkind == Some ParserStrong))
: Tot (accessor (gaccessor_compose_strong a12 a23))
= fun #rrel #rel input pos ->
let h = HST.get () in
slice_access_eq h (gaccessor_compose_strong a12 a23) input pos;
slice_access_eq h (gaccessor_compose a12 a23) input pos;
accessor_compose a12' a23' () input pos
*)
(* Validators *)
[@unifier_hint_injective]
inline_for_extraction
let validator (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
[@unifier_hint_injective]
inline_for_extraction
let validator_no_read (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type =
(#rrel: _) -> (#rel: _) ->
(sl: Ghost.erased (slice rrel rel)) ->
(len: U32.t { len == (Ghost.reveal sl).len }) ->
(pos: U64.t) ->
HST.Stack U64.t
(requires (fun h -> live_slice h sl /\ U64.v pos <= U32.v sl.len))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
if is_success res
then
valid_pos p h sl (uint64_to_uint32 pos) (uint64_to_uint32 res)
else
(~ (valid p h sl (uint64_to_uint32 pos)))
)))
inline_for_extraction
let validate_no_read
(#k: parser_kind) (#t: Type) (#p: parser k t)
(v: validator_no_read p)
: Tot (validator p)
= fun #rrel #rel sl pos -> v (Ghost.hide sl) sl.len pos
noextract
inline_for_extraction
let comment (s: string) : HST.Stack unit
(requires (fun _ -> True))
(ensures (fun h _ h' -> h == h'))
= LowStar.Comment.comment s
noextract
inline_for_extraction
let validate_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
: Tot (validator p)
= fun #rrel #rel sl pos ->
comment s;
v sl pos
inline_for_extraction
let validate_with_error_code
(#k: parser_kind) (#t: Type) (#p: parser k t) (v: validator p) (c: error_code)
: Tot (validator p)
= fun #rrel #rel sl pos ->
let res = v sl pos in
maybe_set_error_code res pos c
inline_for_extraction
let validate
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(v: validator p)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(len: U32.t)
: HST.Stack bool
(requires (fun h ->
B.live h b /\
U32.v len <= B.length b
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let sl = make_slice b len in
(res == true <==> (is_success (Cast.uint32_to_uint64 len) /\ valid p h sl 0ul))
)))
= if is_error (Cast.uint32_to_uint64 len)
then false
else
[@inline_let]
let sl = make_slice b len in
is_success (v sl 0uL)
let valid_total_constant_size
(h: HS.mem)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: nat)
(#rrel #rel: _)
(input: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
))
(ensures (
(valid p h input pos <==> (live_slice h input /\ U32.v input.len - U32.v pos >= k.parser_kind_low)) /\
(valid p h input pos ==> content_length p h input pos == k.parser_kind_low)
))
= parser_kind_prop_equiv k p;
valid_facts p h input pos
inline_for_extraction
let validate_total_constant_size_no_read
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator_no_read p)
= fun #rrel #rel (input: Ghost.erased (slice rrel rel)) len pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 len `U64.sub` pos) sz
then validator_error_not_enough_data
else
(pos `U64.add` sz)
inline_for_extraction
let validate_total_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= validate_no_read (validate_total_constant_size_no_read p sz u)
inline_for_extraction
let validate_total_constant_size_with_error_code
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U64.t)
(c: error_code {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U64.v sz /\
k.parser_kind_metadata == Some ParserKindMetadataTotal
})
: Tot (validator p)
= fun #rrel #rel (input: slice rrel rel) pos ->
let h = HST.get () in
[@inline_let] let _ = valid_total_constant_size h p (U64.v sz) input (uint64_to_uint32 pos) in
if U64.lt (Cast.uint32_to_uint64 input.len `U64.sub` pos) sz
then set_validator_error_pos_and_code validator_error_not_enough_data pos c
else
(pos `U64.add` sz)
let valid_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(p2: parser k2 t)
(h: HS.mem)
#rrel #rel
(sl: slice rrel rel)
(pos: U32.t)
: Lemma
(requires (k1 `is_weaker_than` k2))
(ensures (
(valid (weaken k1 p2) h sl pos \/ valid p2 h sl pos) ==> (
valid p2 h sl pos /\
valid_content_pos (weaken k1 p2) h sl pos (contents p2 h sl pos) (get_valid_pos p2 h sl pos)
)))
= valid_facts (weaken k1 p2) h sl pos;
valid_facts p2 h sl pos
inline_for_extraction
let validate_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: validator p2 { k1 `is_weaker_than` k2 } )
: Tot (validator (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl (uint64_to_uint32 pos)
in
v2 sl pos
[@unifier_hint_injective]
inline_for_extraction
let jumper
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h -> valid p h sl pos))
(ensures (fun h pos' h' ->
B.modifies B.loc_none h h' /\
U32.v pos + content_length p h sl pos == U32.v pos'
))
inline_for_extraction
let jump_constant_size'
(#k: parser_kind)
(#t: Type)
(p: (unit -> GTot (parser k t)))
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper (p ()))
= fun #rrel #rel (input: slice rrel rel) (pos: U32.t) ->
let h = HST.get () in
[@inline_let] let _ = valid_facts (p ()) h input pos in
pos `U32.add` sz
inline_for_extraction
let jump_constant_size
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(sz: U32.t)
(u: unit {
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
})
: Tot (jumper p)
= jump_constant_size' (fun _ -> p) sz u
inline_for_extraction
let jump_weaken
(k1: parser_kind)
(#k2: parser_kind)
(#t: Type)
(#p2: parser k2 t)
(v2: jumper p2 { k1 `is_weaker_than` k2 } )
: Tot (jumper (weaken k1 p2))
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_weaken k1 p2 h sl pos
in
v2 sl pos
let seq_starts_with (#t: Type) (slong sshort: Seq.seq t) : GTot Type0 =
Seq.length sshort <= Seq.length slong /\
Seq.slice slong 0 (Seq.length sshort) `Seq.equal` sshort
let seq_starts_with_trans (#t: Type) (s1 s2 s3: Seq.seq t) : Lemma
(requires (s1 `seq_starts_with` s2 /\ s2 `seq_starts_with` s3))
(ensures (s1 `seq_starts_with` s3))
= ()
let seq_starts_with_append_l_intro (#t: Type) (s1 s2: Seq.seq t) : Lemma
((s1 `Seq.append` s2) `seq_starts_with` s1)
= ()
let seq_starts_with_append_r_elim (#t: Type) (s s1 s2: Seq.seq t) : Lemma
(requires (s `seq_starts_with` (s1 `Seq.append` s2)))
(ensures (
s `seq_starts_with` s1 /\
Seq.slice s (Seq.length s1) (Seq.length s) `seq_starts_with` s2
))
[SMTPat (s `seq_starts_with` (s1 `Seq.append` s2))]
= let s3 = Seq.slice s (Seq.length s1 + Seq.length s2) (Seq.length s) in
assert (s `Seq.equal` (s1 `Seq.append` s2 `Seq.append` s3))
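(* Jump over the serialization of a ghost value [x] known to be a prefix of the
   input: validity at [pos] follows from [serialize_valid_exact], so the jumper [j]
   applies. *)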
inline_for_extraction
noextract
let jump_serializer
(#k: _)
(#t: _)
(#p: parser k t)
(s: serializer p { k.parser_kind_subkind == Some ParserStrong })
(j: jumper p)
(#rrel #rel: _)
(sl: slice rrel rel)
(pos: U32.t)
(x: Ghost.erased t)
: HST.Stack U32.t
(requires (fun h ->
let sq = serialize s (Ghost.reveal x) in
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
bytes_of_slice_from h sl pos `seq_starts_with` sq
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v pos + Seq.length (serialize s (Ghost.reveal x)) == U32.v res
))
= let h = HST.get () in
let gsq = Ghost.hide (serialize s (Ghost.reveal x)) in
let glen = Ghost.hide (Seq.length (Ghost.reveal gsq)) in
let gpos' = Ghost.hide (pos `U32.add` U32.uint_to_t (Ghost.reveal glen)) in
assert (bytes_of_slice_from_to h sl pos (Ghost.reveal gpos') == Seq.slice (bytes_of_slice_from h sl pos) 0 (Seq.length (serialize s (Ghost.reveal x))));
serialize_valid_exact s h sl (Ghost.reveal x) pos (Ghost.reveal gpos');
valid_exact_valid p h sl pos (Ghost.reveal gpos');
j sl pos
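(* A [leaf_reader p] returns the value parsed by [p] at a valid position, without
   modifying memory. *)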
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader
(#k: parser_kind)
(#t: Type)
(p: parser k t)
: Tot Type
= (#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack t
(requires (fun h -> valid p h sl pos))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == contents p h sl pos
))
noextract
inline_for_extraction
let read_with_comment
(s: string)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(r: leaf_reader p)
: Tot (leaf_reader p)
= fun #rrel #rel sl pos ->
comment s;
r sl pos
[@unifier_hint_injective]
inline_for_extraction
let leaf_reader_ext
(#k1: parser_kind)
(#t: Type)
(#p1: parser k1 t)
(p32: leaf_reader p1)
(#k2: parser_kind)
(p2: parser k2 t)
(lem: (
(x: bytes) ->
Lemma
(parse p2 x == parse p1 x)
))
: Tot (leaf_reader p2)
= fun #rrel #rel sl pos ->
let h = HST.get () in
[@inline_let] let _ =
valid_facts p1 h sl pos;
valid_facts p2 h sl pos;
lem (bytes_of_slice_from h sl pos)
in
p32 sl pos
let writable
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
: GTot Type0
= let s = B.as_seq h b in
B.live h b /\
((pos <= pos' /\ pos' <= B.length b) ==> (
(forall (s1:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s1)}
forall (s2:Seq.lseq t (pos' - pos)) . {:pattern (Seq.replace_subseq s pos pos' s2)}
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2
)))
let writable_intro
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(_: squash (B.live h b /\ pos <= pos' /\ pos' <= B.length b))
(f: (
(s1: Seq.lseq t (pos' - pos)) ->
(s2: Seq.lseq t (pos' - pos)) ->
Lemma
(let s = B.as_seq h b in
Seq.replace_subseq s pos pos' s1 `rel` Seq.replace_subseq s pos pos' s2)
))
: Lemma
(writable b pos pos' h)
= Classical.forall_intro_2 f
#push-options "--z3rlimit 32"
let writable_weaken
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(lpos lpos' : nat)
: Lemma
(requires (writable b pos pos' h /\ pos <= lpos /\ lpos <= lpos' /\ lpos' <= pos' /\ pos' <= B.length b))
(ensures (writable b lpos lpos' h))
= writable_intro b lpos lpos' h () (fun s1 s2 ->
let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
let j1 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s1) in
let j2 = Seq.replace_subseq s pos pos' (Seq.replace_subseq sl (lpos - pos) (lpos' - pos) s2) in
assert (Seq.replace_subseq s lpos lpos' s1 `Seq.equal` j1);
assert (Seq.replace_subseq s lpos lpos' s2 `Seq.equal` j2);
assert (j1 `rel` j2)
)
#pop-options
let writable_replace_subseq_elim
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s'
))
= let s = B.as_seq h b in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl)
let writable_replace_subseq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(sl' : Seq.seq t)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
Seq.length sl' == pos' - pos /\
B.as_seq h' b `Seq.equal` Seq.replace_subseq (B.as_seq h b) pos pos' sl' /\
B.live h' b
))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' h'
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let sl = Seq.slice s pos pos' in
assert (s `Seq.equal` Seq.replace_subseq s pos pos' sl);
assert (s' `Seq.equal` Seq.replace_subseq s pos pos' sl');
writable_intro b pos pos' h' () (fun s1 s2 ->
assert (Seq.replace_subseq s' pos pos' s1 `Seq.equal` Seq.replace_subseq s pos pos' s1);
assert (Seq.replace_subseq s' pos pos' s2 `Seq.equal` Seq.replace_subseq s pos pos' s2)
)
let writable_ext
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\
pos' <= B.length b /\
B.as_seq h' b `Seq.equal` B.as_seq h b /\
B.live h' b
))
(ensures (
writable b pos pos' h'
))
= writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h b) pos pos') h'
let writable_upd_seq
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(sl' : Seq.seq t)
(h: HS.mem)
: Lemma
(requires (writable b pos pos' h /\ pos <= pos' /\ pos' <= B.length b /\ Seq.length sl' == pos' - pos))
(ensures (
let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
s `rel` s' /\
writable b pos pos' (B.g_upd_seq b s' h)
))
= let s = B.as_seq h b in
let s' = Seq.replace_subseq s pos pos' sl' in
let h' = B.g_upd_seq b s' h in
B.g_upd_seq_as_seq b s' h; // for live
writable_replace_subseq b pos pos' h sl' h'
let writable_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(i: nat)
(v: t)
: Lemma
(requires (writable b pos pos' h /\ pos <= i /\ i < pos' /\ pos' <= B.length b))
(ensures (
let s = B.as_seq h b in
s `rel` Seq.upd s i v /\
writable b pos pos' (B.g_upd b i v h)
))
= let s = B.as_seq h b in
let sl' = Seq.upd (Seq.slice s pos pos') (i - pos) v in
writable_upd_seq b pos pos' sl' h;
assert (Seq.upd s i v `Seq.equal` Seq.replace_subseq s pos pos' sl')
let writable_modifies
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : nat)
(h: HS.mem)
(l: B.loc)
(h' : HS.mem)
: Lemma
(requires (
writable b pos pos' h /\
pos <= pos' /\ pos' <= B.length b /\
B.modifies (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h' /\
B.loc_disjoint l (B.loc_buffer b)
))
(ensures (
writable b pos pos' h'
))
= B.modifies_buffer_from_to_elim b 0ul (U32.uint_to_t pos) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
B.modifies_buffer_from_to_elim b (U32.uint_to_t pos') (B.len b) (l `B.loc_union` B.loc_buffer_from_to b (U32.uint_to_t pos) (U32.uint_to_t pos')) h h';
writable_replace_subseq b pos pos' h (Seq.slice (B.as_seq h' b) pos pos') h'
inline_for_extraction
noextract
let mbuffer_upd
(#t: Type)
(#rrel #rel: _)
(b: B.mbuffer t rrel rel)
(pos pos' : Ghost.erased nat)
(i: U32.t)
(v: t)
: HST.Stack unit
(requires (fun h ->
writable b (Ghost.reveal pos) (Ghost.reveal pos') h /\
Ghost.reveal pos <= U32.v i /\
U32.v i + 1 <= Ghost.reveal pos' /\
Ghost.reveal pos' <= B.length b
))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to b i (i `U32.add` 1ul)) h h' /\
writable b (Ghost.reveal pos) (Ghost.reveal pos') h' /\
B.as_seq h' b == Seq.upd (B.as_seq h b) (U32.v i) v
))
= let h = HST.get () in
writable_upd b (Ghost.reveal pos) (Ghost.reveal pos') h (U32.v i) v;
B.g_upd_modifies_strong b (U32.v i) v h;
B.g_upd_seq_as_seq b (Seq.upd (B.as_seq h b) (U32.v i) v) h;
B.upd' b i v
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_weak
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
live_slice h sl /\
U32.v pos <= U32.v sl.len /\
U32.v sl.len < U32.v max_uint32 /\
writable sl.base (U32.v pos) (U32.v sl.len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from sl pos) h h' /\ (
if pos' = max_uint32
then U32.v pos + serialized_length s x > U32.v sl.len
else valid_content_pos p h' sl pos x pos'
)))
[@unifier_hint_injective]
inline_for_extraction
let leaf_writer_strong
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let sq = B.as_seq h sl.base in
let len = serialized_length s x in
live_slice h sl /\
U32.v pos + len <= U32.v sl.len /\
writable sl.base (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h pos' h' ->
B.modifies (loc_slice_from_to sl pos pos') h h' /\
valid_content_pos p h' sl pos x pos'
))
[@unifier_hint_injective]
inline_for_extraction
let serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
: Tot Type
= (x: t) ->
(#rrel: _) -> (#rel: _) ->
(b: B.mbuffer byte rrel rel) ->
(pos: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v pos + len <= B.length b /\
writable b (U32.v pos) (U32.v pos + len) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b pos (pos `U32.add` len)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x
)))
inline_for_extraction
let serialize32_ext
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(s1': serializer32 s1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(u: squash (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input)))
: Tot (serializer32 (serialize_ext p1 s1 p2))
= fun x #rrel #rel b pos -> s1' x b pos
inline_for_extraction
let frame_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(x: t)
(#rrel: _)
(#rel: _)
(b: B.mbuffer byte rrel rel)
(posl: Ghost.erased U32.t)
(posr: Ghost.erased U32.t)
(pos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
let len = Seq.length (serialize s x) in
let sq = B.as_seq h b in
B.live h b /\
U32.v (Ghost.reveal posl) <= U32.v pos /\
U32.v pos + len <= U32.v (Ghost.reveal posr) /\
U32.v (Ghost.reveal posr) <= B.length b /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h
))
(ensures (fun h len h' ->
Seq.length (serialize s x) == U32.v len /\ (
B.modifies (B.loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr)) h h' /\
B.live h b /\
Seq.slice (B.as_seq h' b) (U32.v pos) (U32.v pos + U32.v len) `Seq.equal` serialize s x /\
writable b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h' /\
Seq.slice (B.as_seq h' b) (U32.v (Ghost.reveal posl)) (U32.v pos) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v (Ghost.reveal posl)) (U32.v pos) /\
Seq.slice (B.as_seq h' b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr)) `Seq.equal` Seq.slice (B.as_seq h b) (U32.v pos + U32.v len) (U32.v (Ghost.reveal posr))
)))
=
let h0 = HST.get () in
writable_weaken b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 (U32.v pos) (U32.v pos + Seq.length (serialize s x));
let res = s32 x b pos in
let h1 = HST.get () in
let pos' = pos `U32.add` res in
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos pos';
writable_modifies b (U32.v (Ghost.reveal posl)) (U32.v (Ghost.reveal posr)) h0 B.loc_none h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) (Ghost.reveal posl) pos;
B.loc_disjoint_loc_buffer_from_to b (Ghost.reveal posl) pos pos pos';
B.modifies_buffer_from_to_elim b (Ghost.reveal posl) pos (B.loc_buffer_from_to b pos pos') h0 h1;
B.loc_includes_loc_buffer_from_to b (Ghost.reveal posl) (Ghost.reveal posr) pos' (Ghost.reveal posr);
B.loc_disjoint_loc_buffer_from_to b pos pos' pos' (Ghost.reveal posr);
B.modifies_buffer_from_to_elim b pos' (Ghost.reveal posr) (B.loc_buffer_from_to b pos pos') h0 h1;
res
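(* A [serializer32] for a parser with the strong-prefix property yields a strong
   leaf writer: [parse_strong_prefix] shows that the freshly written bytes parse
   back to [x]. *)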
inline_for_extraction
let leaf_writer_strong_of_serializer32
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: serializer32 s)
(u: squash (k.parser_kind_subkind == Some ParserStrong))
: Tot (leaf_writer_strong s)
= fun x #rrel #rel input pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let len = s32 x input.base pos in
[@inline_let]
let pos' = pos `U32.add` len in
let h = HST.get () in
[@inline_let] let _ =
let large = bytes_of_slice_from h input pos in
let small = bytes_of_slice_from_to h input pos pos' in
parse_strong_prefix p small large;
valid_facts p h input pos
in
pos'
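(* For constant-size formats, a strong leaf writer can be weakened: a single length
   check decides whether the value fits. *)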
inline_for_extraction
let leaf_writer_weak_of_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz /\
k.parser_kind_low < U32.v max_uint32
))
: Tot (leaf_writer_weak s)
= fun x #rrel #rel input pos ->
if (input.len `U32.sub` pos) `U32.lt` sz
then max_uint32
else begin
let h = HST.get () in
writable_weaken input.base (U32.v pos) (U32.v input.len) h (U32.v pos) (U32.v pos + U32.v sz);
s32 x input pos
end
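(* Conversely, a strong leaf writer for a constant-size, strong-prefix format can be
   repackaged as a raw-buffer [serializer32]. *)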
inline_for_extraction
let serializer32_of_leaf_writer_strong_constant_size
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(#s: serializer p)
(s32: leaf_writer_strong s)
(sz: U32.t)
(u: squash (
k.parser_kind_subkind == Some ParserStrong /\
k.parser_kind_high == Some k.parser_kind_low /\
k.parser_kind_low == U32.v sz
))
: Tot (serializer32 s)
= fun x #rrel #rel b pos ->
serialized_length_eq s x;
let h0 = HST.get () in
let pos' = s32 x (make_slice b (pos `U32.add` sz)) pos in
[@inline_let]
let len = pos' `U32.sub` pos in
let h = HST.get () in
[@inline_let] let _ =
valid_valid_exact p h (make_slice b (pos `U32.add` sz)) pos;
valid_exact_serialize s h (make_slice b (pos `U32.add` sz)) pos pos'
in
len
inline_for_extraction
let blit_strong
(#a:Type) (#rrel1 #rrel2 #rel1 #rel2: _)
(src: B.mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst: B.mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
B.live h src /\ B.live h dst /\
U32.v idx_src + U32.v len <= B.length src /\
U32.v idx_dst + U32.v len <= B.length dst /\
B.loc_disjoint (B.loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) /\
rel2 (B.as_seq h dst)
(Seq.replace_subseq (B.as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' ->
B.modifies (B.loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) h h' /\
B.live h' dst /\
Seq.slice (B.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (B.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)
))
= let h = HST.get () in
B.blit src idx_src dst idx_dst len;
let h' = HST.get () in
B.modifies_loc_buffer_from_to_intro dst idx_dst (idx_dst `U32.add` len) B.loc_none h h'
#push-options "--z3rlimit 16"
inline_for_extraction
let copy_strong
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
U32.v dpos + U32.v spos' - U32.v spos <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from_to dst dpos (dpos `U32.add` (spos' `U32.sub` spos)))
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos' /\
dpos' `U32.sub` dpos == spos' `U32.sub` spos
))
= let h0 = HST.get () in
let len = spos' `U32.sub` spos in
valid_facts p h0 src spos;
writable_replace_subseq_elim dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h0 (Seq.slice (B.as_seq h0 src.base) (U32.v spos) (U32.v spos'));
blit_strong src.base spos dst.base dpos len;
let h = HST.get () in
[@inline_let] let dpos' = dpos `U32.add` len in
parse_strong_prefix p (bytes_of_slice_from h0 src spos) (bytes_of_slice_from h dst dpos);
valid_facts p h dst dpos;
dpos'
#pop-options
inline_for_extraction
let copy_strong'
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\ (
let clen = content_length p h src spos in
U32.v dpos + clen <= U32.v dst.len /\
live_slice h dst /\
writable dst.base (U32.v dpos) (U32.v dpos + clen) h /\
B.loc_disjoint (loc_slice_from src spos) (loc_slice_from_to dst dpos (dpos `U32.add` (U32.uint_to_t clen)))
)))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from_to dst dpos dpos') h h' /\
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
))
= let spos' = j src spos in
copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak_with_length
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(src: slice rrel1 rel1) // FIXME: length is useless here
(spos spos' : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid_pos p h src spos spos' /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (U32.v spos' - U32.v spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos spos') (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= if (dst.len `U32.sub` dpos) `U32.lt` (spos' `U32.sub` spos)
then max_uint32
else copy_strong p src spos spos' dst dpos
inline_for_extraction
let copy_weak
(#rrel1 #rrel2 #rel1 #rel2: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(jmp: jumper p)
(src: slice rrel1 rel1)
(spos : U32.t)
(dst: slice rrel2 rel2)
(dpos: U32.t)
: HST.Stack U32.t
(requires (fun h ->
k.parser_kind_subkind == Some ParserStrong /\
valid p h src spos /\
live_slice h dst /\
U32.v dpos <= U32.v dst.len /\
U32.v dst.len < U32.v max_uint32 /\
writable dst.base (U32.v dpos) (U32.v dpos + (content_length p h src spos)) h /\
B.loc_disjoint (loc_slice_from_to src spos (get_valid_pos p h src spos)) (loc_slice_from dst dpos)
))
(ensures (fun h dpos' h' ->
B.modifies (loc_slice_from dst dpos) h h' /\ (
if dpos' = max_uint32
then
U32.v dpos + content_length p h src spos > U32.v dst.len
else
valid_content_pos p h' dst dpos (contents p h src spos) dpos'
)))
= let spos' = jmp src spos in
copy_weak_with_length p src spos spos' dst dpos
(* fold_left on lists *)
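(* list_fold_left_gen below is an interruptible fold over a serialized list
   living between pos and pos' in sl: the caller supplies a loop invariant inv
   over (elements already visited, elements remaining, current position), a
   framing lemma inv_frame, an early-exit postcondition post_interrupt with its
   own framing lemma, and a body that returns false to stop the traversal.
   The result is true iff the whole list was traversed, in which case inv holds
   of the full list; otherwise post_interrupt holds. *)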
module BF = LowStar.Buffer
#push-options "--z3rlimit 256 --fuel 1 --ifuel 1"
#restart-solver
inline_for_extraction
let list_fold_left_gen
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(post_interrupt: ((h: HS.mem) -> GTot Type0))
(post_interrupt_frame: (h: HS.mem) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
post_interrupt h
)) (ensures (post_interrupt h')))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
HST.Stack bool
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos1 /\
valid_pos p h0 sl pos1 pos2 /\
valid_list p h0 sl pos2 pos' /\
inv h (contents_list p h0 sl pos pos1) (contents p h0 sl pos1 :: contents_list p h0 sl pos2 pos') pos1
))
(ensures (fun h ctinue h' ->
B.modifies (Ghost.reveal l) h h' /\
(if ctinue then inv h' (contents_list p h0 sl pos pos1 `L.append` [contents p h0 sl pos1]) (contents_list p h0 sl pos2 pos') pos2 else post_interrupt h')
))
))
: HST.Stack bool
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h res h' ->
B.modifies (Ghost.reveal l) h h' /\
(if res then inv h' (contents_list p h sl pos pos') [] pos' else post_interrupt h')
))
= HST.push_frame ();
let h1 = HST.get () in
// B.fresh_frame_modifies h0 h1;
let bpos : BF.pointer U32.t = BF.alloca pos 1ul in
let bctinue : BF.pointer bool = BF.alloca true 1ul in
let btest: BF.pointer bool = BF.alloca (pos `U32.lt` pos') 1ul in
let h2 = HST.get () in
assert (B.modifies B.loc_none h0 h2);
let test_pre (h: HS.mem) : GTot Type0 =
B.live h bpos /\ B.live h bctinue /\ B.live h btest /\ (
let pos1 = Seq.index (B.as_seq h bpos) 0 in
let ctinue = Seq.index (B.as_seq h bctinue) 0 in
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
B.modifies (Ghost.reveal l `B.loc_union` B.loc_region_only true (HS.get_tip h1)) h2 h /\
Seq.index (B.as_seq h btest) 0 == ((U32.v (Seq.index (B.as_seq h bpos) 0) < U32.v pos') && Seq.index (B.as_seq h bctinue) 0) /\
(if ctinue then inv h (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 else post_interrupt h)
)
in
let test_post (cond: bool) (h: HS.mem) : GTot Type0 =
test_pre h /\
cond == Seq.index (B.as_seq h btest) 0
in
valid_list_nil p h0 sl pos;
inv_frame h0 [] (contents_list p h0 sl pos pos') pos h1;
inv_frame h1 [] (contents_list p h0 sl pos pos') pos h2;
[@inline_let]
let while_body () : HST.Stack unit
(requires (fun h -> test_post true h))
(ensures (fun _ _ h1 -> test_pre h1))
=
let h51 = HST.get () in
let pos1 = B.index bpos 0ul in
valid_list_cons_recip p h0 sl pos1 pos';
//assert (B.modifies (Ghost.reveal l `B.loc_union` B.loc_buffer bpos) h0 h51);
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos'));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos pos1));
valid_list_cons_recip p h51 sl pos1 pos';
let pos2 = j sl pos1 in
let h52 = HST.get () in
inv_frame h51 (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') pos1 h52;
B.modifies_only_not_unused_in (Ghost.reveal l) h0 h52;
let ctinue = body pos1 pos2 in
let h53 = HST.get () in
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2));
//assert (B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos2 pos'));
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h1));
valid_pos_frame_strong p h0 sl pos1 pos2 (Ghost.reveal l) h53;
valid_list_snoc p h0 sl pos pos1;
B.upd bpos 0ul pos2;
B.upd bctinue 0ul ctinue;
B.upd btest 0ul ((pos2 `U32.lt` pos') && ctinue);
let h54 = HST.get () in
[@inline_let]
let _ =
if ctinue
then inv_frame h53 (contents_list p h0 sl pos pos2) (contents_list p h0 sl pos2 pos') pos2 h54
else post_interrupt_frame h53 h54
in
()
in
C.Loops.while
#test_pre
#test_post
(fun (_: unit) -> (
B.index btest 0ul) <: HST.Stack bool (requires (fun h -> test_pre h)) (ensures (fun h x h1 -> test_post x h1)))
while_body
;
valid_list_nil p h0 sl pos';
let res = B.index bctinue 0ul in
let h3 = HST.get () in
HST.pop_frame ();
let h4 = HST.get () in
//B.popped_modifies h3 h4;
B.loc_regions_unused_in h0 (Set.singleton (HS.get_tip h3));
[@inline_let]
let _ =
if res
then inv_frame h3 (contents_list p h0 sl pos pos') [] pos' h4
else post_interrupt_frame h3 h4
in
res
#pop-options
//B.loc_includes_union_l (B.loc_all_regions_from false (HS.get_tip h1)) (Ghost.reveal l) (Ghost.reveal l)
//B.modifies_fresh_frame_popped h0 h1 (Ghost.reveal l) h3 h4
module G = FStar.Ghost
inline_for_extraction
let list_fold_left
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(h0: HS.mem)
(l: Ghost.erased B.loc { B.loc_disjoint (Ghost.reveal l) (loc_slice_from_to sl pos pos') } )
(inv: (HS.mem -> list t -> list t -> U32.t -> GTot Type0))
(inv_frame: (h: HS.mem) -> (l1: list t) -> (l2: list t) -> (pos1: U32.t) -> (h' : HS.mem) -> Lemma (requires (
B.modifies (B.loc_unused_in h0) h h' /\
inv h l1 l2 pos1
)) (ensures (inv h' l1 l2 pos1)))
(body: (
(pos1: U32.t) ->
(pos2: U32.t) ->
(l1: Ghost.erased (list t)) ->
(x: Ghost.erased t) ->
(l2: Ghost.erased (list t)) ->
HST.Stack unit
(requires (fun h ->
B.modifies (Ghost.reveal l) h0 h /\
valid_list p h0 sl pos pos' /\
valid_content_pos p h0 sl pos1 (G.reveal x) pos2 /\
U32.v pos <= U32.v pos1 /\ U32.v pos2 <= U32.v pos' /\
B.loc_includes (loc_slice_from_to sl pos pos') (loc_slice_from_to sl pos1 pos2) /\
inv h (Ghost.reveal l1) (Ghost.reveal x :: Ghost.reveal l2) pos1 /\
contents_list p h0 sl pos pos' == Ghost.reveal l1 `L.append` (Ghost.reveal x :: Ghost.reveal l2)
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (Ghost.reveal l1 `L.append` [contents p h0 sl pos1]) (Ghost.reveal l2) pos2
))
))
: HST.Stack unit
(requires (fun h ->
h == h0 /\
valid_list p h sl pos pos' /\
inv h [] (contents_list p h sl pos pos') pos
))
(ensures (fun h _ h' ->
B.modifies (Ghost.reveal l) h h' /\
inv h' (contents_list p h sl pos pos') [] pos'
))
= let _ = list_fold_left_gen
p
j
sl
pos pos'
h0
l
inv
inv_frame
(fun _ -> False)
(fun _ _ -> ())
(fun pos1 pos2 ->
let h = HST.get () in
valid_list_cons p h sl pos1 pos';
valid_list_append p h sl pos pos1 pos';
body
pos1
pos2
(Ghost.hide (contents_list p h sl pos pos1))
(Ghost.hide (contents p h sl pos1))
(Ghost.hide (contents_list p h sl pos2 pos'))
;
true
)
in
()
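(* list_length instantiates list_fold_left with a single stack-allocated U32
   counter; the invariant bounds the counter by the current position, which is
   what rules out overflow when it is incremented. *)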
inline_for_extraction
let list_length
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
U32.v res == L.length (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
B.fresh_frame_modifies h0 h1;
let blen : BF.pointer U32.t = BF.alloca 0ul 1ul in
let h2 = HST.get () in
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer blen))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_buffer blen) h2 h /\
B.live h blen /\ (
let len = U32.v (Seq.index (B.as_seq h blen) 0) in
len <= U32.v pos1 /\ // necessary to prove that length computations do not overflow
len == L.length l1
))
(fun h l1 l2 pos1 h' ->
B.modifies_only_not_unused_in (B.loc_buffer blen) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
B.upd blen 0ul (B.index blen 0ul `U32.add` 1ul);
Classical.forall_intro_2 (list_length_append #t)
)
;
let len = B.index blen 0ul in
HST.pop_frame ();
len
#push-options "--z3rlimit 32 --fuel 2 --ifuel 1"
inline_for_extraction
let list_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(f: (t -> Tot bool))
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
(x: Ghost.erased t) ->
HST.Stack bool
(requires (fun h -> valid_content p h sl pos (G.reveal x)))
(ensures (fun h res h' -> B.modifies B.loc_none h h' /\ res == f (G.reveal x)))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
(#rrel_out #rel_out: _)
(sl_out : slice rrel_out rel_out)
(pos_out : U32.t)
: HST.Stack U32.t
(requires (fun h ->
U32.v pos_out + U32.v pos' - U32.v pos <= U32.v sl_out.len /\
valid_list p h sl pos pos' /\
B.loc_disjoint (loc_slice_from_to sl pos pos') (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
live_slice h sl_out
))
(ensures (fun h pos_out' h' ->
B.modifies (loc_slice_from_to sl_out pos_out pos_out') h h' /\
U32.v pos_out' - U32.v pos_out <= U32.v pos' - U32.v pos /\
valid_list p h' sl_out pos_out pos_out' /\
contents_list p h' sl_out pos_out pos_out' == L.filter f (contents_list p h sl pos pos')
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
//B.fresh_frame_modifies h0 h1;
let bpos_out' : BF.pointer U32.t = BF.alloca pos_out 1ul in
let h2 = HST.get () in
let inv (h: HS.mem) (l1 l2: list t) (pos1: U32.t) : GTot Type0 =
B.live h bpos_out' /\ (
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h /\
writable sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h /\
valid_list p h sl_out pos_out pos_out' /\
contents_list p h sl_out pos_out pos_out' == L.filter f l1 /\
U32.v pos_out' - U32.v pos1 <= U32.v pos_out - U32.v pos // necessary to prove that length computations do not overflow
)
in
valid_list_nil p h2 sl_out pos_out;
list_fold_left
p
j
sl
pos
pos'
h2
(Ghost.hide (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))))
inv
(fun h l1 l2 pos1 h' ->
let pos_out' = Seq.index (B.as_seq h bpos_out') 0 in
B.modifies_only_not_unused_in (B.loc_buffer bpos_out' `B.loc_union` loc_slice_from_to sl_out pos_out pos_out') h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 l1 x l2 ->
let pos_out1 = B.index bpos_out' 0ul in
list_filter_append f (G.reveal l1) [G.reveal x];
if f' sl pos1 x
then begin
assert (B.loc_includes (loc_slice_from_to sl_out pos_out (pos_out `U32.add` (pos' `U32.sub` pos))) (loc_slice_from_to sl_out pos_out1 (pos_out1 `U32.add` (pos2 `U32.sub` pos1))));
let h = HST.get () in
writable_weaken sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (U32.v pos_out1) (U32.v pos_out1 + (U32.v pos2 - U32.v pos1));
let pos_out2 = copy_strong p sl pos1 pos2 sl_out pos_out1 in
B.upd bpos_out' 0ul pos_out2;
let h' = HST.get () in
writable_modifies sl_out.base (U32.v pos_out) (U32.v pos_out + (U32.v pos' - U32.v pos)) h (B.loc_region_only true (HS.get_tip h1)) h';
valid_list_nil p h' sl_out pos_out2;
valid_list_cons p h' sl_out pos_out1 pos_out2;
valid_list_append p h' sl_out pos_out pos_out1 pos_out2
end else
L.append_l_nil (L.filter f (G.reveal l1))
)
;
let pos_out' = B.index bpos_out' 0ul in
HST.pop_frame ();
pos_out'
#pop-options
#push-options "--z3rlimit 64 --fuel 2 --ifuel 1"
inline_for_extraction
let list_nth
(#rrel #rel: _)
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(j: jumper p)
(sl: slice rrel rel)
(pos pos' : U32.t)
(i: U32.t)
: HST.Stack U32.t
(requires (fun h ->
valid_list p h sl pos pos' /\
U32.v i < L.length (contents_list p h sl pos pos')
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
valid_list p h sl pos res /\
valid p h sl res /\
valid_list p h sl (get_valid_pos p h sl res) pos' /\
L.length (contents_list p h sl pos res) == U32.v i /\
contents p h sl res == L.index (contents_list p h sl pos pos') (U32.v i)
))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos1 = BF.alloca pos 1ul in
let bk = BF.alloca 0ul 1ul in
let h2 = HST.get () in
valid_list_nil p h0 sl pos;
let _ : bool = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
let k = Seq.index (B.as_seq h bk) 0 in
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bpos1 /\
B.live h bk /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl pos1 pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v k /\
U32.v k <= U32.v i
)
(fun h _ _ _ h' ->
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bpos1);
// assert (B.loc_not_unused_in h2 `B.loc_includes` B.loc_buffer bk);
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
let pos1 = Seq.index (B.as_seq h bpos1) 0 in
B.live h bpos1 /\
valid p h0 sl pos1 /\
valid_list p h0 sl pos pos1 /\
valid_list p h0 sl (get_valid_pos p h0 sl pos1) pos' /\
L.length (contents_list p h0 sl pos pos1) == U32.v i /\
contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i)
)
(fun _ _ ->
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1 pos2 ->
let k = B.index bk 0ul in
if k = i
then begin
B.upd bpos1 0ul pos1;
valid_list_cons_recip p h0 sl pos1 pos';
list_index_append (contents_list p h0 sl pos pos1) (contents_list p h0 sl pos1 pos') (U32.v i);
valid_list_append p h0 sl pos pos1 pos' ;
assert (contents p h0 sl pos1 == L.index (contents_list p h0 sl pos pos') (U32.v i));
false
end else begin
B.upd bk 0ul (k `U32.add` 1ul);
let h = HST.get () in
B.modifies_only_not_unused_in B.loc_none h0 h;
valid_list_snoc p h0 sl pos pos1;
assert (valid p h0 sl pos1);
assert (pos2 == get_valid_pos p h0 sl pos1);
assert (valid_list p h0 sl pos pos2);
list_length_append (contents_list p h0 sl pos pos1) [contents p h0 sl pos1];
true
end
)
in
let res = B.index bpos1 0ul in
HST.pop_frame ();
res
inline_for_extraction
let list_find
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack U32.t
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\ (
let l = contents_list p h sl pos pos' in
if res = pos'
then L.find f l == None
else
U32.v pos <= U32.v res /\
valid p h sl res /\ (
let x = contents p h sl res in
U32.v res + content_length p h sl res <= U32.v pos' /\
f x == true /\
L.find f l == Some x
)
)))
= let h0 = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bres = BF.alloca 0ul 1ul in
let h2 = HST.get () in
let not_found = list_fold_left_gen
p
j
sl
pos pos'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1)))
(fun h l1 l2 pos1 ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\
valid_list p h0 sl pos1 pos' /\
l2 == contents_list p h0 sl pos1 pos' /\
L.find f (contents_list p h0 sl pos pos') == L.find f l2
)
(fun h _ _ _ h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1)) h2 h /\
B.live h bres /\ (
let res = Seq.index (B.as_seq h bres) 0 in
U32.v pos <= U32.v res /\
valid p h0 sl res /\ (
let x = contents p h0 sl res in
U32.v res + content_length p h0 sl res <= U32.v pos' /\
f x == true /\
L.find f (contents_list p h0 sl pos pos') == Some x
)))
(fun h h' ->
B.loc_unused_in_not_unused_in_disjoint h2;
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1)) h2 h'
)
(fun pos1 pos2 ->
if f' sl pos1
then begin
B.upd bres 0ul pos1;
false
end
else true
)
in
let res =
if not_found
then pos'
else B.index bres 0ul
in
HST.pop_frame ();
res
#pop-options
let rec list_existsb_find
(#a: Type)
(f: (a -> Tot bool))
(l: list a)
: Lemma
(L.existsb f l == Some? (L.find f l))
= match l with
| [] -> ()
| x :: q ->
if f x
then ()
else list_existsb_find f q
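(* list_existsb is implemented on top of list_find: list_existsb_find relates
   L.existsb to L.find, and the element exists exactly when list_find returns a
   position other than pos'. *)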
inline_for_extraction
noextract
let list_existsb
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(f: (t -> Tot bool)) // should be GTot, but List.find requires Tot
(f' : (
(#rrel: _) ->
(#rel: _) ->
(sl: slice rrel rel) ->
(pos: U32.t) ->
HST.Stack bool
(requires (fun h ->
valid p h sl pos
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == f (contents p h sl pos)
))
))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack bool
(requires (fun h -> valid_list p h sl pos pos'))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res == L.existsb f (contents_list p h sl pos pos')
))
= let h = HST.get () in
list_existsb_find f (contents_list p h sl pos pos');
let posn = list_find j f f' sl pos pos' in
posn <> pos'
#push-options "--fuel 2 --ifuel 1 --z3rlimit 256 --query_stats"
inline_for_extraction
noextract
let list_flatten_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot (list t2))) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2_ res /\
contents_list p2 h' sl2 pos2_ res == y
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1')) in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= let hz = HST.get () in
HST.push_frame ();
let h1 = HST.get () in
let bpos2_ = BF.alloca pos2 1ul in
let h2 = HST.get () in
valid_list_nil p2 hz sl2 pos2;
let fits = list_fold_left_gen
p1
j1
sl1
pos1 pos1'
h2
(Ghost.hide (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2))
(fun h ll lr _ ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
B.live h bpos2_ /\ (
let pos2_ = Seq.index (B.as_seq h bpos2_) 0 in
contents_list p1 h0 sl1 pos1 pos1' == ll `List.Tot.append` lr /\
valid_list p2 h sl2 pos2 pos2_ /\
contents_list p2 h sl2 pos2 pos2_ == List.Tot.flatten (List.Tot.map f ll) /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(fun h _ _ _ h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun h ->
B.modifies (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h /\
U32.v pos2 + serialized_list_length s2 (List.Tot.flatten (List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1'))) > U32.v sl2.len
)
(fun h h' ->
B.modifies_only_not_unused_in (B.loc_region_only true (HS.get_tip h1) `B.loc_union` loc_slice_from sl2 pos2) h2 h';
B.loc_unused_in_not_unused_in_disjoint h2
)
(fun pos1l pos1r ->
let pos2_ = B.index bpos2_ 0ul in
let h = HST.get () in
writable_weaken sl2.base (U32.v pos2) (U32.v sl2.len) h (U32.v pos2_) (U32.v sl2.len);
valid_pos_frame_strong p1 h0 sl1 pos1l pos1r (loc_slice_from sl2 pos2) hz;
let res = f' pos1l pos2_ in
let fits = not (res = max_uint32) in
if fits then begin
B.upd bpos2_ 0ul res;
let h' = HST.get () in
writable_modifies sl2.base (U32.v pos2) (U32.v sl2.len) h (B.loc_region_only true (HS.get_tip h1)) h' ;
List.Tot.append_assoc (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l] (contents_list p1 h0 sl1 pos1r pos1');
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) [contents p1 h0 sl1 pos1l];
valid_list_snoc p1 h0 sl1 pos1 pos1l;
valid_list_append p2 h' sl2 pos2 pos2_ res;
valid_list_nil p2 h' sl2 res;
valid_list_append p2 h' sl2 pos2_ res res
end else begin
let h' = HST.get () in
valid_list_cons p1 h0 sl1 pos1l pos1' ;
valid_list_append p1 h0 sl1 pos1 pos1l pos1' ;
list_flatten_map_append f (contents_list p1 h0 sl1 pos1 pos1l) (contents_list p1 h0 sl1 pos1l pos1');
serialized_list_length_append s2 (L.flatten (L.map f (contents_list p1 h0 sl1 pos1 pos1l))) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1l pos1')));
serialized_list_length_append s2 (f (contents p1 h0 sl1 pos1l)) (L.flatten (L.map f (contents_list p1 h0 sl1 pos1r pos1')));
valid_list_serialized_list_length s2 h' sl2 pos2 pos2_
end;
fits
)
in
let res =
if fits
then B.index bpos2_ 0ul
else max_uint32
in
HST.pop_frame ();
res
#pop-options
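(* list_map below is derived from list_flatten_map by sending each element to
   the singleton list [f x]; list_map_list_flatten_map bridges L.map f with
   L.flatten (L.map (fun x -> [f x])) in the specification. *)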
#push-options "--z3rlimit 16 --query_stats"
inline_for_extraction
noextract
let list_map
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(j1: jumper p1)
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2 { k2.parser_kind_subkind == Some ParserStrong /\ k2.parser_kind_low > 0 } )
(f: (t1 -> Tot t2)) // should be GTot, but List.Tot.map requires Tot
(h0: HS.mem)
(#rrel1 #rel1: _)
(sl1: slice rrel1 rel1)
(pos1 pos1' : U32.t)
(#rrel2 #rel2: _)
(sl2: slice rrel2 rel2)
(pos2: U32.t {
valid_list p1 h0 sl1 pos1 pos1' /\
U32.v pos1 <= U32.v pos1' /\
U32.v pos1' <= U32.v sl1.len /\
U32.v pos2 <= U32.v sl2.len /\
B.loc_disjoint (loc_slice_from_to sl1 pos1 pos1') (loc_slice_from sl2 pos2) /\
U32.v sl2.len < U32.v max_uint32
})
(f' : (
(pos1_: U32.t) ->
(pos2_: U32.t) ->
HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
valid p1 h0 sl1 pos1_ /\
U32.v pos1 <= U32.v pos1_ /\
U32.v pos1_ + content_length p1 h0 sl1 pos1_ <= U32.v pos1' /\
live_slice h sl2 /\
U32.v pos2 <= U32.v pos2_ /\
U32.v pos2_ <= U32.v sl2.len /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2_) h h' /\ (
let y = f (contents p1 h0 sl1 pos1_) in
if res = max_uint32
then U32.v pos2_ + serialized_length s2 y > U32.v sl2.len
else
valid_content_pos p2 h' sl2 pos2_ y res
)))
))
: HST.Stack U32.t
(requires (fun h ->
B.modifies (loc_slice_from sl2 pos2) h0 h /\
live_slice h sl2 /\
writable sl2.base (U32.v pos2) (U32.v sl2.len) h
))
(ensures (fun h res h' ->
B.modifies (loc_slice_from sl2 pos2) h h' /\ (
let y = List.Tot.map f (contents_list p1 h0 sl1 pos1 pos1') in
if res = max_uint32
then U32.v pos2 + serialized_list_length s2 y > U32.v sl2.len
else
valid_list p2 h' sl2 pos2 res /\
contents_list p2 h' sl2 pos2 res == y
)))
= list_map_list_flatten_map f (contents_list p1 h0 sl1 pos1 pos1');
list_flatten_map
j1
s2
(fun x -> [f x])
h0
sl1 pos1 pos1'
sl2 pos2
(fun pos1 pos2 ->
let res = f' pos1 pos2 in
let h = HST.get () in
if res = max_uint32
then begin
serialized_list_length_nil s2;
serialized_list_length_cons s2 (f (contents p1 h0 sl1 pos1)) []
end
else begin
valid_list_nil p2 h sl2 res;
valid_list_cons p2 h sl2 pos2 res
end;
res
)
(* Example: trivial printers *)
inline_for_extraction
let print_list
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(j: jumper p)
(print: ((#rrel: _) -> (#rel: _) -> (sl: slice rrel rel) -> (pos: U32.t) -> HST.Stack unit (requires (fun h -> valid p h sl pos)) (ensures (fun h _ h' -> B.modifies B.loc_none h h'))))
(#rrel #rel: _)
(sl: slice rrel rel)
(pos pos' : U32.t)
: HST.Stack unit
(requires (fun h ->
valid_list p h sl pos pos'
))
(ensures (fun h _ h' ->
B.modifies B.loc_none h h'
))
= let h0 = HST.get () in
list_fold_left
p
j
sl
pos pos'
h0
(Ghost.hide B.loc_none)
(fun _ _ _ _ -> True)
(fun _ _ _ _ _ -> ())
(fun pos1 _ _ _ _ ->
print sl pos1
)
(* Monotonicity *)
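(* compl_t t is a family of buffer predicates (B.spred byte) indexed by a start
   position, a parsed value and an end position. wvalid states, purely on a byte
   sequence, that p parses that value between pos and gpos' and that the
   completion predicate holds; wvalid_valid_content_pos turns this back into
   valid_content_pos in any heap where the slice is live with those contents,
   and irepr packages a position with a B.witnessed proof of wvalid on s.base. *)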
inline_for_extraction
let compl_t (t: Type) = U32.t -> t -> U32.t -> Tot (B.spred byte)
let wvalid
(#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos' : Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
: GTot prop
=
U32.v pos <= U32.v (Ghost.reveal gpos') /\
U32.v (Ghost.reveal gpos') <= U32.v s.len /\
U32.v s.len <= Seq.length x /\
parse p (Seq.slice x (U32.v pos) (U32.v s.len)) == Some (Ghost.reveal gv, U32.v (Ghost.reveal gpos') - U32.v pos) /\
compl pos (Ghost.reveal gv) (Ghost.reveal gpos') x
let wvalid_valid_content_pos
(#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel)
(compl: compl_t t)
(pos: U32.t)
(gpos' : Ghost.erased U32.t)
(gv: Ghost.erased t)
(x: Seq.seq byte)
(h: HS.mem)
: Lemma
(requires (
wvalid p s compl pos gpos' gv x /\
live_slice h s /\
x == B.as_seq h s.base
))
(ensures (
valid_content_pos p h s pos gv gpos'
))
=
valid_facts p h s pos
inline_for_extraction
noeq
type irepr (#t: Type) (#k: parser_kind) (p: parser k t) (#rrel #rel: _) (s: slice rrel rel) (compl: compl_t t) =
| IRepr:
(pos: U32.t) ->
(gpos' : Ghost.erased U32.t) ->
(gv: Ghost.erased t) ->
(irepr_correct: squash (
U32.v pos <= U32.v (Ghost.reveal gpos') /\
U32.v (Ghost.reveal gpos') <= U32.v s.len /\
B.witnessed s.base (wvalid p s compl pos gpos' gv)
)) ->
irepr p s compl
inline_for_extraction
let irepr_pos
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : Tot U32.t =
IRepr?.pos x
let irepr_pos'
(#t: Type) (#k: parser_kind) (#p: parser k t) (#rrel #rel: _) (#s: slice rrel rel) (#compl: compl_t t) (x: irepr p s compl) : Ghost U32.t
(requires True)
| false | false | LowParse.Low.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
}
| null | val irepr_pos'
(#t: Type)
(#k: parser_kind)
(#p: parser k t)
(#rrel #rel: _)
(#s: slice rrel rel)
(#compl: compl_t t)
(x: irepr p s compl)
: Ghost U32.t (requires True) (ensures (fun y -> True))
| [] | LowParse.Low.Base.irepr_pos' | {
"file_name": "src/lowparse/LowParse.Low.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
}
| x: LowParse.Low.Base.irepr p s compl -> Prims.Ghost FStar.UInt32.t | {
"end_col": 31,
"end_line": 2068,
"start_col": 2,
"start_line": 2068
} |