Dataset schema, one column per field of each record:

| Column | Type |
|---|---|
| file_name | string (5–52 chars) |
| name | string (4–95 chars) |
| original_source_type | string (0–23k chars) |
| source_type | string (9–23k chars) |
| source_definition | string (9–57.9k chars) |
| source | dict |
| source_range | dict |
| file_context | string (0–721k chars) |
| dependencies | dict |
| opens_and_abbrevs | list (2–94 items) |
| vconfig | dict |
| interleaved | bool (1 class) |
| verbose_type | string (1–7.42k chars) |
| effect | string (118 distinct values) |
| effect_flags | sequence (0–2 items) |
| mutual_with | sequence (0–11 items) |
| ideal_premises | sequence (0–236 items) |
| proof_features | sequence (0–1 items) |
| is_simple_lemma | bool (2 classes) |
| is_div | bool (2 classes) |
| is_proof | bool (2 classes) |
| is_simply_typed | bool (2 classes) |
| is_type | bool (2 classes) |
| partial_definition | string (5–3.99k chars) |
| completed_definiton | string (1–1.63M chars) |
| isa_cross_project_example | bool (1 class) |

Each record below is one pipe-delimited sample row following this schema.
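As a reading aid, here is a minimal sketch of how records with this schema can be inspected programmatically. It assumes the dump comes from a Hugging Face-style `datasets` repository; `"<dataset-id>"` is a placeholder rather than the actual repository name, and the field accesses use only columns listed in the schema above.

```python
# Minimal sketch, assuming a Hugging Face-style dataset hosts these records.
# "<dataset-id>" is a placeholder; substitute the real repository name.
from datasets import load_dataset

ds = load_dataset("<dataset-id>", split="train")

# Keep only proof rows drawn from LowParse.Spec.Combinators.
combinators = ds.filter(
    lambda row: row["is_proof"]
    and row["file_name"].endswith("LowParse.Spec.Combinators.fst")
)

# Print the declared type and definition of a few sample rows.
for row in combinators.select(range(min(3, len(combinators)))):
    print(row["name"])                 # fully qualified F* name
    print(row["source_type"])          # the declared val/type
    print(row["source_definition"])    # the let-definition itself
    print(row["vconfig"]["z3rlimit"])  # solver options used when checking it
```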
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.tot_parse_fret | val tot_parse_fret (#t #t': Type) (f: (t -> Tot t')) (v: t) : Tot (tot_parser parse_ret_kind t') | val tot_parse_fret (#t #t': Type) (f: (t -> Tot t')) (v: t) : Tot (tot_parser parse_ret_kind t') | let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 21,
"end_line": 86,
"start_col": 0,
"start_line": 84
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: t -> t') -> v: t -> LowParse.Spec.Base.tot_parser LowParse.Spec.Combinators.parse_ret_kind t' | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Combinators.tot_parse_fret'",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Combinators.parse_ret_kind",
"LowParse.Spec.Base.tot_parser"
] | [] | false | false | false | true | false | let tot_parse_fret (#t #t': Type) (f: (t -> Tot t')) (v: t) : Tot (tot_parser parse_ret_kind t') =
| [@@ inline_let ]let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v | false |
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.tot_parse_fret' | val tot_parse_fret' (#t #t': Type) (f: (t -> Tot t')) (v: t) : Tot (tot_bare_parser t') | val tot_parse_fret' (#t #t': Type) (f: (t -> Tot t')) (v: t) : Tot (tot_bare_parser t') | let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 56,
"end_line": 81,
"start_col": 0,
"start_line": 80
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: t -> t') -> v: t -> LowParse.Spec.Base.tot_bare_parser t' | Prims.Tot | [
"total"
] | [] | [
"LowParse.Bytes.bytes",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.tot_bare_parser"
] | [] | false | false | false | true | false | let tot_parse_fret' (#t #t': Type) (f: (t -> Tot t')) (v: t) : Tot (tot_bare_parser t') =
| fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) | false |
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.tot_and_then_bare | val tot_and_then_bare (#t #t': Type) (p: tot_bare_parser t) (p': (t -> Tot (tot_bare_parser t')))
: Tot (tot_bare_parser t') | val tot_and_then_bare (#t #t': Type) (p: tot_bare_parser t) (p': (t -> Tot (tot_bare_parser t')))
: Tot (tot_bare_parser t') | let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 18,
"end_line": 46,
"start_col": 0,
"start_line": 30
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Spec.Base.tot_bare_parser t -> p': (_: t -> LowParse.Spec.Base.tot_bare_parser t')
-> LowParse.Spec.Base.tot_bare_parser t' | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.tot_bare_parser",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.consumed_length",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.option",
"FStar.Seq.Base.slice",
"LowParse.Bytes.byte",
"FStar.Seq.Base.length"
] | [] | false | false | false | true | false | let tot_and_then_bare (#t #t': Type) (p: tot_bare_parser t) (p': (t -> Tot (tot_bare_parser t')))
: Tot (tot_bare_parser t') =
| fun (b: bytes) ->
match p b with
| Some (v, l) ->
let p'v = p' v in
let s':bytes = Seq.slice b l (Seq.length b) in
(match p'v s' with
| Some (v', l') ->
let res:consumed_length b = l + l' in
Some (v', res)
| None -> None)
| None -> None | false |
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.serialize_synth | val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2)) | val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2)) | let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1 | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 34,
"end_line": 109,
"start_col": 0,
"start_line": 95
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
() | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p1: LowParse.Spec.Base.parser k t1 ->
f2: (_: t1 -> Prims.GTot t2) ->
s1: LowParse.Spec.Base.serializer p1 ->
g1: (_: t2 -> Prims.GTot t1) ->
u622:
u625:
Prims.unit
{ LowParse.Spec.Combinators.synth_inverse f2 g1 /\
LowParse.Spec.Combinators.synth_injective f2 }
-> LowParse.Spec.Base.serializer (LowParse.Spec.Combinators.parse_synth p1 f2) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.unit",
"Prims.l_and",
"LowParse.Spec.Combinators.synth_inverse",
"LowParse.Spec.Combinators.synth_injective",
"LowParse.Spec.Combinators.bare_serialize_synth",
"LowParse.Spec.Combinators.bare_serialize_synth_correct",
"LowParse.Spec.Combinators.parse_synth"
] | [] | false | false | false | false | false | let serialize_synth
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(s1: serializer p1)
(g1: (t2 -> GTot t1))
(u: unit{synth_inverse f2 g1 /\ synth_injective f2})
: Tot (serializer (parse_synth p1 f2)) =
| bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1 | false |
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.tot_and_then | val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
)) | val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
)) | let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 52,
"start_col": 0,
"start_line": 48
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Spec.Base.tot_parser k t -> p': (_: t -> LowParse.Spec.Base.tot_parser k' t')
-> Prims.Pure (LowParse.Spec.Base.tot_parser (LowParse.Spec.Combinators.and_then_kind k k') t') | Prims.Pure | [] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_parser",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_ext",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.and_then_bare",
"LowParse.Spec.Combinators.and_then_correct",
"LowParse.Spec.Base.tot_bare_parser",
"LowParse.Spec.Combinators.tot_and_then_bare"
] | [] | false | false | false | false | false | let tot_and_then #k #t p #k' #t' p' =
| let f:tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p';
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f | false |
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.parse_synth | val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True)) | val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True)) | let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1)) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 65,
"end_line": 65,
"start_col": 0,
"start_line": 54
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p1: LowParse.Spec.Base.parser k t1 -> f2: (_: t1 -> Prims.GTot t2)
-> Prims.Pure (LowParse.Spec.Base.parser k t2) | Prims.Pure | [] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.coerce",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.parse_ret_kind",
"LowParse.Spec.Combinators.and_then",
"LowParse.Spec.Combinators.parse_fret",
"LowParse.Spec.Combinators.synth_injective",
"Prims.l_True"
] | [] | false | false | false | false | false | let parse_synth (#k: parser_kind) (#t1 #t2: Type) (p1: parser k t1) (f2: (t1 -> GTot t2))
: Pure (parser k t2) (requires (synth_injective f2)) (ensures (fun _ -> True)) =
| coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1)) | false |
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.tot_parse_synth | val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
)) | val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
)) | let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1)) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 77,
"end_line": 90,
"start_col": 0,
"start_line": 88
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p1: LowParse.Spec.Base.tot_parser k t1 -> f2: (_: t1 -> t2)
-> Prims.Pure (LowParse.Spec.Base.tot_parser k t2) | Prims.Pure | [] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_parser",
"LowParse.Spec.Base.coerce",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.parse_ret_kind",
"LowParse.Spec.Combinators.tot_and_then",
"LowParse.Spec.Combinators.tot_parse_fret"
] | [] | false | false | false | false | false | let tot_parse_synth #k #t1 #t2 p1 f2 =
| coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1)) | false |
Pulse.Lib.Reference.fst | Pulse.Lib.Reference.gather2 | val gather2 (#a:Type) (r:ref a) (#x0 #x1:erased a)
: stt_ghost unit
(pts_to r #one_half x0 ** pts_to r #one_half x1)
(fun _ -> pts_to r x0 ** pure (x0 == x1)) | val gather2 (#a:Type) (r:ref a) (#x0 #x1:erased a)
: stt_ghost unit
(pts_to r #one_half x0 ** pts_to r #one_half x1)
(fun _ -> pts_to r x0 ** pure (x0 == x1)) | let gather2 (#a:Type) (r:ref a) (#x0 #x1:erased a)
: stt_ghost unit
(pts_to r #one_half x0 ** pts_to r #one_half x1)
(fun () -> pts_to r x0 ** pure (x0 == x1))
= gather r | {
"file_name": "share/steel/examples/pulse/lib/Pulse.Lib.Reference.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 10,
"end_line": 134,
"start_col": 0,
"start_line": 130
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Lib.Reference
open Pulse.Lib.Core
open Pulse.Main
module H = Pulse.Lib.HigherReference
module U = FStar.Universe
let ref a = H.ref (U.raise_t a)
let pts_to
(#a:Type u#0)
(r:ref a)
(#[exact (`full_perm)] [@@@equate_by_smt] p:perm)
([@@@equate_by_smt] v:a)
= H.pts_to r #p (U.raise_val v)
```pulse
fn alloc' (#a:Type u#0) (v:a)
requires emp
returns r:ref a
ensures pts_to r v
{
let r = H.alloc (U.raise_val v);
fold (pts_to r #full_perm v);
r
}
```
let alloc = alloc'
```pulse
fn read (#a:Type) (r:ref a) (#n:erased a) (#p:perm)
requires pts_to r #p n
returns x:a
ensures pts_to r #p n ** pure (eq2 #a (reveal n) x)
{
unfold (pts_to r #p n);
let k = H.( !r );
fold (pts_to r #p n);
U.downgrade_val k
}
```
let ( ! ) #a = read #a
```pulse
fn write (#a:Type) (r:ref a) (x:a) (#n:erased a)
requires pts_to r #full_perm n
ensures pts_to r #full_perm x
{
unfold (pts_to r #full_perm n);
H.(r := (U.raise_val x));
fold (pts_to r #full_perm x)
}
```
let ( := ) #a r x #n = write #a r x #n
```pulse
fn free' #a (r:ref a) (#n:erased a)
requires pts_to r #full_perm n
ensures emp
{
unfold (pts_to r #full_perm n);
H.free r;
}
```
let free = free'
```pulse
ghost
fn share' (#a:Type) (r:ref a) (#v:erased a) (#p:perm)
requires pts_to r #p v
ensures pts_to r #(half_perm p) v ** pts_to r #(half_perm p) v
{
unfold pts_to r #p v;
H.share r;
fold pts_to r #(half_perm p) v;
fold pts_to r #(half_perm p) v
}
```
let share = share'
```pulse
ghost
fn raise_inj (a:Type u#0) (x0 x1:a)
requires pure (U.raise_val u#0 u#1 x0 == U.raise_val u#0 u#1 x1)
ensures pure (x0 == x1)
{
assert pure (U.downgrade_val (U.raise_val u#0 u#1 x0) == x0);
assert pure (U.downgrade_val (U.raise_val u#0 u#1 x1) == x1);
}
```
```pulse
ghost
fn gather' (#a:Type) (r:ref a) (#x0 #x1:erased a) (#p0 #p1:perm)
requires pts_to r #p0 x0 ** pts_to r #p1 x1
ensures pts_to r #(sum_perm p0 p1) x0 ** pure (x0 == x1)
{
unfold pts_to r #p0 x0;
unfold pts_to r #p1 x1;
H.gather r;
fold (pts_to r #(sum_perm p1 p0) x0);
let qq = sum_perm p0 p1; //hack! prevent the unifier from structurally matching sum_perm p0 p1 with sum_perm p1 p0
rewrite (pts_to r #(sum_perm p1 p0) x0)
as (pts_to r #qq x0);
raise_inj a x0 x1;
}
```
let gather = gather'
let share2 (#a:Type) (r:ref a) (#v:erased a)
: stt_ghost unit
(pts_to r v)
(fun _ -> pts_to r #one_half v ** pts_to r #one_half v)
= share #a r #v | {
"checked_file": "/",
"dependencies": [
"Pulse.Main.fsti.checked",
"Pulse.Lib.HigherReference.fsti.checked",
"Pulse.Lib.Core.fsti.checked",
"prims.fst.checked",
"FStar.Universe.fsti.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Lib.Reference.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Universe",
"short_module": "U"
},
{
"abbrev": true,
"full_module": "Pulse.Lib.HigherReference",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Pulse.Main",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "PulseCore.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Lib.Core",
"short_module": null
},
{
"abbrev": false,
"full_module": "PulseCore.Observability",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: Pulse.Lib.Reference.ref a
-> Pulse.Lib.Core.stt_ghost Prims.unit
(Pulse.Lib.Reference.pts_to r (FStar.Ghost.reveal x0) **
Pulse.Lib.Reference.pts_to r (FStar.Ghost.reveal x1))
(fun _ ->
Pulse.Lib.Reference.pts_to r (FStar.Ghost.reveal x0) ** Pulse.Lib.Core.pure (x0 == x1)) | Prims.Tot | [
"total"
] | [] | [
"Pulse.Lib.Reference.ref",
"FStar.Ghost.erased",
"Pulse.Lib.Reference.gather",
"Pulse.Lib.Core.one_half",
"Pulse.Lib.Core.stt_ghost",
"Prims.unit",
"Pulse.Lib.Core.op_Star_Star",
"Pulse.Lib.Reference.pts_to",
"FStar.Ghost.reveal",
"PulseCore.FractionalPermission.sum_perm",
"Pulse.Lib.Core.pure",
"Prims.eq2",
"Pulse.Lib.Core.vprop",
"PulseCore.FractionalPermission.full_perm"
] | [] | false | false | false | false | false | let gather2 (#a: Type) (r: ref a) (#x0 #x1: erased a)
: stt_ghost unit
(pts_to r #one_half x0 ** pts_to r #one_half x1)
(fun () -> pts_to r x0 ** pure (x0 == x1)) =
| gather r | false |
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.serialize_tagged_union | val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) | val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) | let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 46,
"end_line": 285,
"start_col": 0,
"start_line": 271
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
st: LowParse.Spec.Base.serializer pt ->
tag_of_data: (_: data_t -> Prims.GTot tag_t) ->
s: (t: tag_t -> LowParse.Spec.Base.serializer (p t))
-> Prims.Pure
(LowParse.Spec.Base.serializer (LowParse.Spec.Combinators.parse_tagged_union pt tag_of_data p)) | Prims.Pure | [] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Combinators.bare_serialize_tagged_union",
"Prims.unit",
"LowParse.Spec.Combinators.bare_serialize_tagged_union_correct",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.parse_tagged_union",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"Prims.l_True"
] | [] | false | false | false | false | false | let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(s: (t: tag_t -> Tot (serializer (p t))))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) =
| bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s | false |
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.serialize_dtuple2 | val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2)) | val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2)) | let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ()) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 95,
"end_line": 317,
"start_col": 0,
"start_line": 304
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s1:
LowParse.Spec.Base.serializer p1
{ Mkparser_kind'?.parser_kind_subkind k1 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } ->
s2: (x: t1 -> LowParse.Spec.Base.serializer (p2 x))
-> LowParse.Spec.Base.serializer (LowParse.Spec.Combinators.parse_dtuple2 p1 p2) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"LowParse.Spec.Combinators.serialize_tagged_union",
"Prims.dtuple2",
"FStar.Pervasives.dfst",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Combinators.synth_dtuple2",
"LowParse.Spec.Combinators.serialize_synth",
"LowParse.Spec.Combinators.synth_dtuple2_recip",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.parse_dtuple2"
] | [] | false | false | false | false | false | let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1 -> parser k2 (t2 x)))
(s2: (x: t1 -> serializer (p2 x)))
: Tot (serializer (parse_dtuple2 p1 p2)) =
| serialize_tagged_union s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ()) | false |
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.tot_nondep_then_bare | val tot_nondep_then_bare (#t1: Type) (p1: tot_bare_parser t1) (#t2: Type) (p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2)) | val tot_nondep_then_bare (#t1: Type) (p1: tot_bare_parser t1) (#t2: Type) (p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2)) | let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 13,
"end_line": 412,
"start_col": 0,
"start_line": 398
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p1: LowParse.Spec.Base.tot_bare_parser t1 -> p2: LowParse.Spec.Base.tot_bare_parser t2
-> LowParse.Spec.Base.tot_bare_parser (t1 * t2) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.tot_bare_parser",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.consumed_length",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.None",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length"
] | [] | false | false | false | true | false | let tot_nondep_then_bare (#t1: Type) (p1: tot_bare_parser t1) (#t2: Type) (p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2)) =
| fun b ->
match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
(match p2 b' with
| Some (x2, consumed2) -> Some ((x1, x2), consumed1 + consumed2)
| _ -> None)
| _ -> None | false |
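
For illustration of the tot_nondep_then_bare combinator recorded in the row above: it runs two total bare parsers in sequence on the same input, pairs their results, and adds the consumed lengths. The sketch below instantiates it with a one-byte bare parser; read_one_byte and read_two_bytes are hypothetical names introduced only for this illustration (they are not LowParse definitions), and the Some (v, consumed) shape mirrors tot_parse_fret' from this file.

(* hypothetical helper, not a LowParse definition: reads the first byte of the input *)
let read_one_byte : tot_bare_parser FStar.UInt8.t =
  fun (b: bytes) ->
    if Seq.length b >= 1
    then Some (Seq.index b 0, (1 <: consumed_length b))
    else None

(* pairs the first two bytes, consuming 2 bytes on success, and fails otherwise *)
let read_two_bytes : tot_bare_parser (FStar.UInt8.t & FStar.UInt8.t) =
  tot_nondep_then_bare read_one_byte read_one_byte
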
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.nondep_then | val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2)) | val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2)) | let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x)) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 72,
"end_line": 370,
"start_col": 0,
"start_line": 359
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p1: LowParse.Spec.Base.parser k1 t1 -> p2: LowParse.Spec.Base.parser k2 t2
-> LowParse.Spec.Base.parser (LowParse.Spec.Combinators.and_then_kind k1 k2) (t1 * t2) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Combinators.parse_tagged_union",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.fst",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Base.refine_with_tag",
"FStar.Pervasives.Native.Mktuple2",
"LowParse.Spec.Combinators.and_then_kind"
] | [] | false | false | false | false | false | let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2)) =
| parse_tagged_union p1 fst (fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x)) | false |
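
As a usage sketch for nondep_then from the row above: it composes two non-dependent parsers into a parser for a pair, with kind and_then_kind k1 k2. The snippet assumes open LowParse.Spec.Int and its single-byte parser parse_u8; that module and name are assumptions for illustration, not definitions taken from this file.

(* assuming LowParse.Spec.Int.parse_u8 : a parser for one FStar.UInt8.t value *)
let parse_u8_pair = parse_u8 `nondep_then` parse_u8
(* parse_u8_pair consumes two bytes and returns them as a pair;
   nondep_then_eq above can be used to unfold its behaviour on a concrete input *)
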
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.serialize_synth_upd_chain | val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
)) | val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
)) | let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 36,
"end_line": 162,
"start_col": 0,
"start_line": 128
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p1: LowParse.Spec.Base.parser k t1 ->
f2: (_: t1 -> Prims.GTot t2) ->
s1: LowParse.Spec.Base.serializer p1 ->
g1: (_: t2 -> Prims.GTot t1) ->
u769:
u784:
Prims.unit
{ LowParse.Spec.Combinators.synth_inverse f2 g1 /\
LowParse.Spec.Combinators.synth_injective f2 } ->
x1: t1 ->
x2: t2 ->
y1: t1 ->
y2: t2 ->
i': Prims.nat ->
s': LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(requires
(let s = LowParse.Spec.Base.serialize s1 x1 in
i' + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s /\
LowParse.Spec.Base.serialize s1 y1 == LowParse.Spec.Base.seq_upd_seq s i' s' /\
x2 == f2 x1 /\ y2 == f2 y1))
(ensures
(let s =
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_synth p1 f2 s1 g1 u769
)
x2
in
i' + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s /\
FStar.Seq.Base.length s == FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 x1) /\
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_synth p1 f2 s1 g1 u769)
y2 ==
LowParse.Spec.Base.seq_upd_seq s i' s')) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.unit",
"Prims.l_and",
"LowParse.Spec.Combinators.synth_inverse",
"LowParse.Spec.Combinators.synth_injective",
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims._assert",
"Prims.l_Forall",
"Prims.eq2",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Seq.Base.seq",
"LowParse.Spec.Base.serialize",
"LowParse.Spec.Base.seq_upd_seq",
"Prims.squash",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Combinators.serialize_synth",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let serialize_synth_upd_chain
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(s1: serializer p1)
(g1: (t2 -> GTot t1))
(u: unit{synth_inverse f2 g1 /\ synth_injective f2})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s': bytes)
: Lemma
(requires
(let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\ serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\ y2 == f2 y1))
(ensures
(let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\ Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s')) =
| assert (forall w w'. f2 w == f2 w' ==> w == w');
assert (forall w. f2 (g1 w) == w) | false |
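
The lemma in the row above transports a byte-level update (seq_upd_seq) through serialize_synth. For context, a typical serialize_synth instance to which such a lemma applies is sketched below; the wrapped type, wrap/unwrap, and parse_u8 / serialize_u8 (assumed from LowParse.Spec.Int) are illustrative assumptions, and the synth_injective / synth_inverse side conditions on the final () argument are expected to be discharged by SMT.

type wrapped = | Wrap : FStar.UInt8.t -> wrapped

(* ghost coercions used as the synth functions; injective and mutually inverse *)
let wrap (x: FStar.UInt8.t) : GTot wrapped = Wrap x
let unwrap (x: wrapped) : GTot FStar.UInt8.t = match x with | Wrap v -> v

let parse_wrapped = parse_u8 `parse_synth` wrap
let serialize_wrapped : serializer parse_wrapped =
  serialize_synth parse_u8 wrap serialize_u8 unwrap ()
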
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.serialize_synth_upd_bw_chain | val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
)) | val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
)) | let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 36,
"end_line": 198,
"start_col": 0,
"start_line": 164
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p1: LowParse.Spec.Base.parser k t1 ->
f2: (_: t1 -> Prims.GTot t2) ->
s1: LowParse.Spec.Base.serializer p1 ->
g1: (_: t2 -> Prims.GTot t1) ->
u825:
u840:
Prims.unit
{ LowParse.Spec.Combinators.synth_inverse f2 g1 /\
LowParse.Spec.Combinators.synth_injective f2 } ->
x1: t1 ->
x2: t2 ->
y1: t1 ->
y2: t2 ->
i': Prims.nat ->
s': LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(requires
(let s = LowParse.Spec.Base.serialize s1 x1 in
i' + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s /\
LowParse.Spec.Base.serialize s1 y1 == LowParse.Spec.Base.seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\ y2 == f2 y1))
(ensures
(let s =
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_synth p1 f2 s1 g1 u825
)
x2
in
i' + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s /\
FStar.Seq.Base.length s == FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 x1) /\
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_synth p1 f2 s1 g1 u825)
y2 ==
LowParse.Spec.Base.seq_upd_bw_seq s i' s')) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.unit",
"Prims.l_and",
"LowParse.Spec.Combinators.synth_inverse",
"LowParse.Spec.Combinators.synth_injective",
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims._assert",
"Prims.l_Forall",
"Prims.eq2",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Seq.Base.seq",
"LowParse.Spec.Base.serialize",
"LowParse.Spec.Base.seq_upd_bw_seq",
"Prims.squash",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Combinators.serialize_synth",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(s1: serializer p1)
(g1: (t2 -> GTot t1))
(u: unit{synth_inverse f2 g1 /\ synth_injective f2})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s': bytes)
: Lemma
(requires
(let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\ serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\ y2 == f2 y1))
(ensures
(let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\ Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s')) =
| assert (forall w w'. f2 w == f2 w' ==> w == w');
assert (forall w. f2 (g1 w) == w) | false |
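Note: the two asserts closing serialize_synth_upd_bw_chain above simply restate synth_injective f2 (forall w w'. f2 w == f2 w' ==> w == w') and synth_inverse f2 g1 (forall w. f2 (g1 w) == w) on concrete terms so the SMT patterns can fire. A minimal hypothetical pair satisfying both properties is a constructor wrapper; the sketch below is illustrative only (wrap, f2', g1' are made-up names, not part of LowParse), and is not claimed to be the exact terms this proof needs.

(* Hypothetical sketch: a constructor wrapper f2' is injective and g1' is
   its right inverse, so the pair (f2', g1') meets the preconditions of
   serialize_synth and hence of serialize_synth_upd_bw_chain. *)
type wrap (t: Type) = | Mk : v: t -> wrap t

let f2' (#t: Type) (x: t) : GTot (wrap t) = Mk x   (* synth function *)
let g1' (#t: Type) (w: wrap t) : GTot t = Mk?.v w  (* its inverse *)

(* synth_injective f2'   : forall x x' . f2' x == f2' x' ==> x == x'
   synth_inverse f2' g1' : forall w . f2' (g1' w) == w *)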
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.parse_tagged_union_eq | val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
)) | val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
)) | let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 112,
"end_line": 229,
"start_col": 0,
"start_line": 204
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
pt: LowParse.Spec.Base.parser kt tag_t ->
tag_of_data: (_: data_t -> Prims.GTot tag_t) ->
p: (t: tag_t -> LowParse.Spec.Base.parser k (LowParse.Spec.Base.refine_with_tag tag_of_data t)) ->
input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Combinators.parse_tagged_union pt tag_of_data p) input ==
((match LowParse.Spec.Base.parse pt input with
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None
| FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ tg consumed_tg) ->
let input_tg = FStar.Seq.Base.slice input consumed_tg (FStar.Seq.Base.length input) in
(match LowParse.Spec.Base.parse (p tg) input_tg with
| FStar.Pervasives.Native.Some
#_
(FStar.Pervasives.Native.Mktuple2 #_ #_ x consumed_x) ->
FStar.Pervasives.Native.Some (x, consumed_tg + consumed_x)
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None)
<:
FStar.Pervasives.Native.option (data_t * LowParse.Spec.Base.consumed_length input))
<:
FStar.Pervasives.Native.option (data_t * LowParse.Spec.Base.consumed_length input))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Combinators.parse_synth_eq",
"LowParse.Spec.Combinators.synth_tagged_union_data",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"Prims.unit",
"LowParse.Spec.Combinators.and_then_eq",
"LowParse.Spec.Combinators.parse_tagged_union_payload",
"LowParse.Spec.Combinators.parse_tagged_union_payload_and_then_cases_injective",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Combinators.parse_tagged_union",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input ==
(match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None)) =
| parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k
#(refine_with_tag tag_of_data tg)
(p tg)
(synth_tagged_union_data tag_of_data tg)
input_tg | false |
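Note: the equation proved by parse_tagged_union_eq above is the usual "parse the tag, then parse the tag-dependent payload on the remaining bytes" decomposition. A worked reading with illustrative byte counts (the numbers are assumptions, not taken from the dataset):

(* Suppose parse pt input = Some (tg, 2), i.e. the tag parser consumes 2 bytes,
   and parse (p tg) (Seq.slice input 2 (Seq.length input)) = Some (x, 5).
   Then parse (parse_tagged_union pt tag_of_data p) input
   = Some ((x <: data_t), 2 + 5) = Some (x, 7),
   and a failure of either sub-parser makes the whole parse return None. *)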
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.serialize_nondep_then | val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2)) | val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2)) | let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ()) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 126,
"end_line": 432,
"start_col": 0,
"start_line": 419
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s1:
LowParse.Spec.Base.serializer p1
{ Mkparser_kind'?.parser_kind_subkind k1 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } ->
s2: LowParse.Spec.Base.serializer p2
-> LowParse.Spec.Base.serializer (LowParse.Spec.Combinators.nondep_then p1 p2) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"LowParse.Spec.Combinators.serialize_tagged_union",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.fst",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Base.refine_with_tag",
"FStar.Pervasives.Native.Mktuple2",
"LowParse.Spec.Combinators.serialize_synth",
"FStar.Pervasives.Native.snd",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.nondep_then"
] | [] | false | false | false | false | false | let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2)) =
| serialize_tagged_union s1
fst
(fun x ->
serialize_synth p2
(fun y -> (x, y) <: refine_with_tag fst x)
s2
(fun (xy: refine_with_tag fst x) -> snd xy)
()) | false |
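Note: unfolding serialize_tagged_union and serialize_synth in the definition of serialize_nondep_then above, serializing a pair amounts to concatenating the two component serializations. The equation below is an illustrative unfolding of that definition, not a quoted LowParse lemma:

(* For xy : t1 * t2,
   serialize (serialize_nondep_then s1 s2) xy
   == serialize s1 (fst xy) `Seq.append` serialize s2 (snd xy) *)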
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.parse_tagged_union_eq_gen | val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input) | val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input) | let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 22,
"end_line": 265,
"start_col": 0,
"start_line": 231
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
pt: LowParse.Spec.Base.parser kt tag_t ->
tag_of_data: (_: data_t -> Prims.GTot tag_t) ->
p: (t: tag_t -> LowParse.Spec.Base.parser k (LowParse.Spec.Base.refine_with_tag tag_of_data t)) ->
pt': LowParse.Spec.Base.parser kt' tag_t ->
lem_pt:
(input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures LowParse.Spec.Base.parse pt input == LowParse.Spec.Base.parse pt' input)) ->
k': (t: tag_t -> LowParse.Spec.Base.parser_kind) ->
p':
(t: tag_t
-> LowParse.Spec.Base.parser (k' t) (LowParse.Spec.Base.refine_with_tag tag_of_data t)) ->
lem_p':
(k: tag_t -> input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures LowParse.Spec.Base.parse (p k) input == LowParse.Spec.Base.parse (p' k) input)) ->
input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Combinators.parse_tagged_union pt tag_of_data p) input ==
LowParse.Spec.Combinators.bare_parse_tagged_union pt' tag_of_data k' p' input) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Bytes.bytes",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowParse.Spec.Combinators.parse_synth_eq",
"LowParse.Spec.Combinators.synth_tagged_union_data",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"LowParse.Spec.Combinators.and_then_eq",
"LowParse.Spec.Combinators.parse_tagged_union_payload",
"LowParse.Spec.Combinators.parse_tagged_union_payload_and_then_cases_injective",
"LowParse.Spec.Combinators.parse_tagged_union",
"LowParse.Spec.Combinators.bare_parse_tagged_union"
] | [] | false | false | true | false | false | let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (input: bytes -> Lemma (parse pt input == parse pt' input)))
(k': (t: tag_t -> Tot parser_kind))
(p': (t: tag_t -> Tot (parser (k' t) (refine_with_tag tag_of_data t))))
(lem_p': (k: tag_t -> input: bytes -> Lemma (parse (p k) input == parse (p' k) input)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input ==
bare_parse_tagged_union pt' tag_of_data k' p' input) =
| parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k
#(refine_with_tag tag_of_data tg)
(p tg)
(synth_tagged_union_data tag_of_data tg)
input_tg;
lem_p' tg input_tg | false |
FStar.Matrix.fsti | FStar.Matrix.flattened_index_is_under_flattened_size | val flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i * n) + j)) < m * n) | val flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i * n) + j)) < m * n) | let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 55,
"end_line": 46,
"start_col": 0,
"start_line": 45
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
m: Prims.pos ->
n: Prims.pos ->
i: FStar.IntegerIntervals.under m ->
j: FStar.IntegerIntervals.under n
-> FStar.Pervasives.Lemma (ensures i * n + j < m * n) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.pos",
"FStar.IntegerIntervals.under",
"Prims._assert",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Mul.op_Star",
"Prims.op_Subtraction",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.op_LessThan",
"Prims.op_Addition",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i * n) + j)) < m * n) =
| assert (i * n <= (m - 1) * n) | false |
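Note: the single assert in flattened_index_is_under_flattened_size above suffices because the bound is plain arithmetic on the index ranges; a worked derivation (not part of the dataset):

(* i <= m - 1 and j <= n - 1, so
   i * n + j <= (m - 1) * n + (n - 1) = m * n - 1 < m * n. *)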
FStar.Matrix.fsti | FStar.Matrix.get_ij | val get_ij (m n: pos) (i: under m) (j: under n) : under (m * n) | val get_ij (m n: pos) (i: under m) (j: under n) : under (m * n) | let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 60,
"end_line": 51,
"start_col": 0,
"start_line": 50
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
m: Prims.pos ->
n: Prims.pos ->
i: FStar.IntegerIntervals.under m ->
j: FStar.IntegerIntervals.under n
-> FStar.IntegerIntervals.under (m * n) | Prims.Tot | [
"total"
] | [] | [
"Prims.pos",
"FStar.IntegerIntervals.under",
"Prims.op_Addition",
"FStar.Mul.op_Star",
"Prims.unit",
"FStar.Matrix.flattened_index_is_under_flattened_size"
] | [] | false | false | false | false | false | let get_ij (m n: pos) (i: under m) (j: under n) : under (m * n) =
| flattened_index_is_under_flattened_size m n i j;
i * n + j | false |
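Note: get_ij above is ordinary row-major flattening; a small numeric example with illustrative values:

(* With m = 3 rows and n = 4 columns, cell (i, j) = (2, 1) maps to
   get_ij 3 4 2 1 = 2 * 4 + 1 = 9, and 9 < 3 * 4 = 12 as required.
   Going back, get_i 3 4 9 = 9 / 4 = 2 and get_j 3 4 9 = 9 % 4 = 1. *)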
FStar.Matrix.fsti | FStar.Matrix.transpose_ji | val transpose_ji (m n: pos) (ij: under (m * n)) : under (n * m) | val transpose_ji (m n: pos) (ij: under (m * n)) : under (n * m) | let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 35,
"end_line": 74,
"start_col": 0,
"start_line": 72
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Prims.pos -> n: Prims.pos -> ij: FStar.IntegerIntervals.under (m * n)
-> FStar.IntegerIntervals.under (n * m) | Prims.Tot | [
"total"
] | [] | [
"Prims.pos",
"FStar.IntegerIntervals.under",
"FStar.Mul.op_Star",
"Prims.op_Addition",
"FStar.Matrix.get_j",
"FStar.Matrix.get_i",
"Prims.unit",
"FStar.Matrix.flattened_index_is_under_flattened_size"
] | [] | false | false | false | false | false | let transpose_ji (m n: pos) (ij: under (m * n)) : under (n * m) =
| flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij) * m + (get_i m n ij) | false |
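Note: transpose_ji above maps the row-major index of cell (i, j) in the m*n matrix to the row-major index of cell (j, i) in the n*m transpose; a numeric example with illustrative values:

(* With m = 3, n = 4 and ij = 9: get_i 3 4 9 = 2 and get_j 3 4 9 = 1, so
   transpose_ji 3 4 9 = 1 * 3 + 2 = 5, which is the row-major index of
   cell (1, 2) in the 4*3 transpose, i.e. the transposed position of (2, 1). *)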
FStar.Matrix.fsti | FStar.Matrix.get_i | val get_i (m n: pos) (ij: under (m * n)) : under m | val get_i (m n: pos) (ij: under (m * n)) : under m | let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 57,
"end_line": 55,
"start_col": 0,
"start_line": 55
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Prims.pos -> n: Prims.pos -> ij: FStar.IntegerIntervals.under (m * n)
-> FStar.IntegerIntervals.under m | Prims.Tot | [
"total"
] | [] | [
"Prims.pos",
"FStar.IntegerIntervals.under",
"FStar.Mul.op_Star",
"Prims.op_Division"
] | [] | false | false | false | false | false | let get_i (m n: pos) (ij: under (m * n)) : under m =
| ij / n | false |
FStar.Matrix.fsti | FStar.Matrix.get_j | val get_j (m n: pos) (ij: under (m * n)) : under n | val get_j (m n: pos) (ij: under (m * n)) : under n | let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 57,
"end_line": 56,
"start_col": 0,
"start_line": 56
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Prims.pos -> n: Prims.pos -> ij: FStar.IntegerIntervals.under (m * n)
-> FStar.IntegerIntervals.under n | Prims.Tot | [
"total"
] | [] | [
"Prims.pos",
"FStar.IntegerIntervals.under",
"FStar.Mul.op_Star",
"Prims.op_Modulus"
] | [] | false | false | false | false | false | let get_j (m n: pos) (ij: under (m * n)) : under n =
| ij % n | false |
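As an illustration of get_i and get_j (a sketch added here, not taken from the source file): the row of a flat index is recovered by integer division and the column by the remainder. The name _get_i_get_j_example is invented; only built-in arithmetic and assert_norm are assumed.

(* For a 3x4 matrix (m = 3, n = 4), flat index 7 lies in row 7 / 4 = 1 and
   column 7 % 4 = 3, i.e. get_i 3 4 7 = 1 and get_j 3 4 7 = 3. *)
let _get_i_get_j_example : unit =
  assert_norm ((7 / 4 = 1) && (7 % 4 = 3))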
FStar.Matrix.fsti | FStar.Matrix.indices_transpose_lemma | val indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j * m + i) % m = i) && ((j * m + i) / m = j)) | val indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j * m + i) % m = i) && ((j * m + i) / m = j)) | let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 68,
"end_line": 78,
"start_col": 0,
"start_line": 77
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Prims.pos -> i: FStar.IntegerIntervals.under m -> j: Prims.nat
-> FStar.Pervasives.Lemma (ensures (j * m + i) % m = i && (j * m + i) / m = j) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.pos",
"FStar.IntegerIntervals.under",
"Prims.nat",
"FStar.Math.Lemmas.lemma_mod_plus",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.b2t",
"Prims.op_AmpAmp",
"Prims.op_Equality",
"Prims.int",
"Prims.op_Modulus",
"Prims.op_Addition",
"FStar.Mul.op_Star",
"Prims.op_Division",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j * m + i) % m = i) && ((j * m + i) / m = j)) =
| ML.lemma_mod_plus i j m | false |
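indices_transpose_lemma is the division identity (j * m + i) % m = i and (j * m + i) / m = j specialized to index arithmetic; one concrete instance is checked below. This is an added sketch with an invented name, assuming only built-in arithmetic and assert_norm.

(* With m = 3, i = 2, j = 5: j * m + i = 17, and 17 % 3 = 2, 17 / 3 = 5. *)
let _indices_transpose_example : unit =
  assert_norm (((5 * 3 + 2) % 3 = 2) && ((5 * 3 + 2) / 3 = 5))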
FStar.Matrix.fsti | FStar.Matrix.matrix_add | val matrix_add (#c #eq: _) (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb) | val matrix_add (#c #eq: _) (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb) | let matrix_add #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb)
= init (matrix_add_generator add ma mb) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 41,
"end_line": 209,
"start_col": 0,
"start_line": 207
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof that the transposition transform is a bijection *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A proof that the transposition transform swaps the 2D indices *)
let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A matrix can always be treated as a flattened seq *)
val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
(s:SB.seq c {
SB.length s=m*n /\
(forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
})
(* Indexer for a matrix *)
val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
(t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
(* Indexer for a matrix returns the correct value *)
val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
(* A matrix can always be constructed from an m*n-sized seq *)
val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
(* A type for matrices constructed via concrete generator *)
type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
(forall (i: under m) (j: under n). ijth z i j == gen i j) /\
(forall (ij: under (m*n)). (SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij)))
}
(* Monoid-based fold of a matrix treated as a flat seq *)
val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c
(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *)
val matrix_fold_equals_fold_of_seq :
(#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n)
-> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
(* A matrix constructed from given generator *)
val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n)
-> matrix_of generator
(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *)
val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
/\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
)
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
val matrix_transpose_is_permutation (#c:_) (#m #n: pos)
(generator: matrix_generator c m n)
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n))
val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_)
(#m #n: pos)
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen)))
(* The equivalence relation defined for matrices of given dimensions *)
val matrix_equiv : (#c: Type) ->
(eq: CE.equiv c) ->
(m: pos) -> (n: pos) ->
CE.equiv (matrix c m n)
(* element-wise matrix equivalence lemma *)
val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c)
(ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb)
(ensures ijth ma i j `eq.eq` ijth mb i j)
(* We can always establish matrix equivalence from element-wise equivalence *)
val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures (matrix_equiv eq m n).eq ma mb)
(*
Notice that even though we can (and will) construct CommMonoid for matrix addition,
we still publish the operations as well since as soon as we get to multiplication,
results usually have different dimensions, so it would be convenient to have both
the CommMonoid for matrix addition and the explicit addition function.
This becomes the only way with non-square matrix multiplication, since these
would not constitute a monoid to begin with.
*)
(* This version of the lemma is useful if we don't want to invoke
Classical.forall_intro_2 in a big proof to conserve resources *)
let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
(proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j)))
: Lemma ((matrix_equiv eq m n).eq ma mb)
= Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb
(* This one is the generator function for sum of matrices *)
let matrix_add_generator #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n = fun i j -> add.mult (ijth ma i j) (ijth mb i j) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
add: FStar.Algebra.CommMonoid.Equiv.cm c eq ->
ma: FStar.Matrix.matrix c m n ->
mb: FStar.Matrix.matrix c m n
-> FStar.Matrix.matrix_of (FStar.Matrix.matrix_add_generator add ma mb) | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"Prims.pos",
"FStar.Algebra.CommMonoid.Equiv.cm",
"FStar.Matrix.matrix",
"FStar.Matrix.init",
"FStar.Matrix.matrix_add_generator",
"FStar.Matrix.matrix_of"
] | [] | false | false | false | false | false | let matrix_add #c #eq (#m: pos) (#n: pos) (add: CE.cm c eq) (ma: matrix c m n) (mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb) =
| init (matrix_add_generator add ma mb) | false |
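Read element-wise, matrix_add should satisfy: every entry of the sum is add.mult of the corresponding entries of the operands. The sketch below merely states this property; it is an added illustration with an invented name, and the unit proof is only expected to go through because matrix_add's result type is matrix_of (matrix_add_generator add ma mb), whose refinement pins down ijth on the result. An actual run may need extra SMT hints.

let _matrix_add_ijth_sketch #c #eq (#m #n: pos) (add: CE.cm c eq)
  (ma mb: matrix c m n) (i: under m) (j: under n)
  : Lemma (ijth (matrix_add add ma mb) i j == add.mult (ijth ma i j) (ijth mb i j))
  = () (* expected to follow from the refinement carried by matrix_of *)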
FStar.Matrix.fsti | FStar.Matrix.consistency_of_i_j | val consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) | val consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) | let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 25,
"end_line": 64,
"start_col": 0,
"start_line": 60
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
m: Prims.pos ->
n: Prims.pos ->
i: FStar.IntegerIntervals.under m ->
j: FStar.IntegerIntervals.under n
-> FStar.Pervasives.Lemma
(ensures
FStar.Matrix.get_i m n (FStar.Matrix.get_ij m n i j) = i /\
FStar.Matrix.get_j m n (FStar.Matrix.get_ij m n i j) = j) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.pos",
"FStar.IntegerIntervals.under",
"FStar.Math.Lemmas.lemma_div_plus",
"Prims.unit",
"FStar.Math.Lemmas.lemma_mod_plus",
"FStar.Matrix.flattened_index_is_under_flattened_size",
"Prims.l_True",
"Prims.squash",
"Prims.l_and",
"Prims.b2t",
"Prims.op_Equality",
"FStar.Matrix.get_i",
"FStar.Matrix.get_ij",
"FStar.Matrix.get_j",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
| flattened_index_is_under_flattened_size m n i j;
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n | false |
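consistency_of_i_j says that encoding a pair (i, j) to a flat index and decoding it again is the identity. A concrete round trip is checked below; this is an added sketch with an invented name, assuming only built-in arithmetic and assert_norm.

(* In a 2x3 matrix, (i, j) = (1, 2) encodes to 1 * 3 + 2 = 5, and decoding
   5 gives back row 5 / 3 = 1 and column 5 % 3 = 2. *)
let _consistency_example : unit =
  assert_norm ((1 * 3 + 2 = 5) && (5 / 3 = 1) && (5 % 3 = 2))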
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.tot_parse_filter_payload | val tot_parse_filter_payload (#t: Type) (f: (t -> Tot bool)) (v: t)
: Tot (tot_parser parse_filter_payload_kind (parse_filter_refine f)) | val tot_parse_filter_payload (#t: Type) (f: (t -> Tot bool)) (v: t)
: Tot (tot_parser parse_filter_payload_kind (parse_filter_refine f)) | let tot_parse_filter_payload
(#t: Type)
(f: (t -> Tot bool))
(v: t)
: Tot (tot_parser parse_filter_payload_kind (parse_filter_refine f))
= let p : tot_bare_parser (parse_filter_refine f) =
if f v
then
let v' : (x: t { f x == true } ) = v in
tot_weaken parse_filter_payload_kind (tot_parse_ret v')
else tot_fail_parser parse_filter_payload_kind (parse_filter_refine f)
in
parser_kind_prop_equiv parse_filter_payload_kind p;
p | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 689,
"start_col": 0,
"start_line": 676
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ())
let serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
= ()
let length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
= ()
let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
= serialize_nondep_then_upd_left s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s1' = serialize s1 (fst x) in
let l1 = Seq.length s1' in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) s1' (serialize s2 (snd x));
seq_upd_seq_right_to_left s 0 s1' i' s';
seq_upd_seq_slice_idem s 0 (Seq.length s1')
let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
= serialize_nondep_then_upd_left s1 s2 x y
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
= let j' = Seq.length (serialize s1 (fst x)) - i' - Seq.length s' in
serialize_nondep_then_upd_left_chain s1 s2 x y j' s';
assert (j' == Seq.length (serialize (serialize_nondep_then s1 s2) x) - (Seq.length (serialize s2 (snd x)) + i') - Seq.length s')
let serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_right s (serialize s2 y);
let l2 = Seq.length s - Seq.length (serialize s2 (snd x)) in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2) (Seq.slice s l2 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
= serialize_nondep_then_upd_right s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s2' = serialize s2 (snd x) in
let l2 = Seq.length s - Seq.length s2' in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2) (Seq.slice s l2 (Seq.length s)) (serialize s1 (fst x)) s2';
seq_upd_seq_right_to_left s l2 s2' i' s';
seq_upd_seq_slice_idem s l2 (Seq.length s)
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let make_total_constant_size_parser_compose
(sz: nat)
(t1 t2: Type)
(f1: ((s: bytes {Seq.length s == sz}) -> GTot t1))
(g2: t1 -> GTot t2)
: Lemma
(requires (
make_total_constant_size_parser_precond sz t1 f1 /\
(forall x x' . g2 x == g2 x' ==> x == x')
))
(ensures (
make_total_constant_size_parser_precond sz t1 f1 /\
make_total_constant_size_parser_precond sz t2 (f1 `compose` g2) /\
(forall x x' . {:pattern (g2 x); (g2 x')} g2 x == g2 x' ==> x == x') /\
(forall input . {:pattern (parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input)} parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input == parse (make_total_constant_size_parser sz t1 f1 `parse_synth` g2) input)
))
= ()
let parse_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
: Tot (parser (parse_filter_kind k) (parse_filter_refine f))
= p `and_then` (parse_filter_payload f)
let parse_filter_eq
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
(input: bytes)
: Lemma
(parse (parse_filter p f) input == (match parse p input with
| None -> None
| Some (x, consumed) ->
if f x
then Some (x, consumed)
else None
))
= () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: t -> Prims.bool) -> v: t
-> LowParse.Spec.Base.tot_parser LowParse.Spec.Combinators.parse_filter_payload_kind
(LowParse.Spec.Combinators.parse_filter_refine f) | Prims.Tot | [
"total"
] | [] | [
"Prims.bool",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Combinators.parse_filter_refine",
"LowParse.Spec.Combinators.parse_filter_payload_kind",
"LowParse.Spec.Base.tot_bare_parser",
"LowParse.Spec.Base.tot_weaken",
"LowParse.Spec.Combinators.parse_ret_kind",
"LowParse.Spec.Combinators.tot_parse_ret",
"Prims.eq2",
"LowParse.Spec.Combinators.tot_fail_parser",
"LowParse.Spec.Base.tot_parser"
] | [] | false | false | false | false | false | let tot_parse_filter_payload (#t: Type) (f: (t -> Tot bool)) (v: t)
: Tot (tot_parser parse_filter_payload_kind (parse_filter_refine f)) =
| let p:tot_bare_parser (parse_filter_refine f) =
if f v
then
let v':(x: t{f x == true}) = v in
tot_weaken parse_filter_payload_kind (tot_parse_ret v')
else tot_fail_parser parse_filter_payload_kind (parse_filter_refine f)
in
parser_kind_prop_equiv parse_filter_payload_kind p;
p | false |
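tot_parse_filter_payload is the payload half of filtering; client code normally goes through parse_filter (shown in the surrounding context), which restricts an existing parser to the values accepted by a boolean test. The sketch below stays within this file's API by filtering the constant parser parse_ret; in practice one would filter a real base parser such as parse_u8 from LowParse.Spec.Int, which is assumed to exist elsewhere and is not defined here. The name _parse_positive_constant is invented for this illustration.

(* A parser that consumes no input, yields the constant 42, and is then
   restricted by parse_filter to strictly positive results. *)
let _parse_positive_constant = parse_ret 42 `parse_filter` (fun (x: int) -> x > 0)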
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.tot_nondep_then | val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
)) | val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
)) | let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2 | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 28,
"end_line": 417,
"start_col": 0,
"start_line": 414
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p1: LowParse.Spec.Base.tot_parser k1 t1 -> p2: LowParse.Spec.Base.tot_parser k2 t2
-> Prims.Pure
(LowParse.Spec.Base.tot_parser (LowParse.Spec.Combinators.and_then_kind k1 k2) (t1 * t2)) | Prims.Pure | [] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_parser",
"LowParse.Spec.Combinators.tot_nondep_then_bare",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_ext",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.nondep_then",
"FStar.Classical.forall_intro",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"FStar.Seq.Base.slice",
"LowParse.Bytes.byte",
"FStar.Seq.Base.length",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"FStar.Pervasives.Native.None",
"LowParse.Spec.Combinators.nondep_then_eq"
] | [] | false | false | false | false | false | let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
| Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2 | false |
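Illustrative sketch (editorial, not part of any dataset record): the module and definition names below (`TotNondepThenExample`, `pair_parsers`) are hypothetical; only `parser_kind`, `tot_parser`, `and_then_kind` and `tot_nondep_then` from the LowParse context above are assumed. It shows the intended use of `tot_nondep_then`: pairing two total parsers into a total parser for the product type, relying on the trivial precondition of its `Pure` specification.

module TotNondepThenExample
open LowParse.Spec.Base
open LowParse.Spec.Combinators

(* Pair two total parsers; the result parses a t1 value followed by a t2 value. *)
let pair_parsers
  (#k1 #k2: parser_kind) (#t1 #t2: Type)
  (p1: tot_parser k1 t1) (p2: tot_parser k2 t2)
: tot_parser (and_then_kind k1 k2) (t1 * t2)
= tot_nondep_then p1 p2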
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.parse_filter | val parse_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
: Tot (parser (parse_filter_kind k) (parse_filter_refine f)) | val parse_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
: Tot (parser (parse_filter_kind k) (parse_filter_refine f)) | let parse_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
: Tot (parser (parse_filter_kind k) (parse_filter_refine f))
= p `and_then` (parse_filter_payload f) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 39,
"end_line": 658,
"start_col": 0,
"start_line": 652
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ())
let serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
= ()
let length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
= ()
let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
= serialize_nondep_then_upd_left s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s1' = serialize s1 (fst x) in
let l1 = Seq.length s1' in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) s1' (serialize s2 (snd x));
seq_upd_seq_right_to_left s 0 s1' i' s';
seq_upd_seq_slice_idem s 0 (Seq.length s1')
let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
= serialize_nondep_then_upd_left s1 s2 x y
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
= let j' = Seq.length (serialize s1 (fst x)) - i' - Seq.length s' in
serialize_nondep_then_upd_left_chain s1 s2 x y j' s';
assert (j' == Seq.length (serialize (serialize_nondep_then s1 s2) x) - (Seq.length (serialize s2 (snd x)) + i') - Seq.length s')
let serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_right s (serialize s2 y);
let l2 = Seq.length s - Seq.length (serialize s2 (snd x)) in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2) (Seq.slice s l2 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
= serialize_nondep_then_upd_right s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s2' = serialize s2 (snd x) in
let l2 = Seq.length s - Seq.length s2' in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2) (Seq.slice s l2 (Seq.length s)) (serialize s1 (fst x)) s2';
seq_upd_seq_right_to_left s l2 s2' i' s';
seq_upd_seq_slice_idem s l2 (Seq.length s)
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let make_total_constant_size_parser_compose
(sz: nat)
(t1 t2: Type)
(f1: ((s: bytes {Seq.length s == sz}) -> GTot t1))
(g2: t1 -> GTot t2)
: Lemma
(requires (
make_total_constant_size_parser_precond sz t1 f1 /\
(forall x x' . g2 x == g2 x' ==> x == x')
))
(ensures (
make_total_constant_size_parser_precond sz t1 f1 /\
make_total_constant_size_parser_precond sz t2 (f1 `compose` g2) /\
(forall x x' . {:pattern (g2 x); (g2 x')} g2 x == g2 x' ==> x == x') /\
(forall input . {:pattern (parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input)} parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input == parse (make_total_constant_size_parser sz t1 f1 `parse_synth` g2) input)
))
= () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Spec.Base.parser k t -> f: (_: t -> Prims.GTot Prims.bool)
-> LowParse.Spec.Base.parser (LowParse.Spec.Combinators.parse_filter_kind k)
(LowParse.Spec.Combinators.parse_filter_refine f) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"Prims.bool",
"LowParse.Spec.Combinators.and_then",
"LowParse.Spec.Combinators.parse_filter_payload_kind",
"LowParse.Spec.Combinators.parse_filter_refine",
"LowParse.Spec.Combinators.parse_filter_payload",
"LowParse.Spec.Combinators.parse_filter_kind"
] | [] | false | false | false | false | false | let parse_filter (#k: parser_kind) (#t: Type) (p: parser k t) (f: (t -> GTot bool))
: Tot (parser (parse_filter_kind k) (parse_filter_refine f)) =
| p `and_then` (parse_filter_payload f) | false |
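Illustrative sketch (editorial, not part of any dataset record): `ParseFilterExample`, `is_even` and `parse_even` are hypothetical names; only `parser`, `parse_filter_kind`, `parse_filter_refine` and `parse_filter` from the LowParse context above are assumed. It shows how `parse_filter` restricts a byte parser to values satisfying a ghost boolean predicate, with the refinement reflected in the result type.

module ParseFilterExample
open LowParse.Spec.Base
open LowParse.Spec.Combinators
module U8 = FStar.UInt8

(* Ghost predicate selecting even byte values. *)
let is_even (x: U8.t) : GTot bool = U8.v x % 2 = 0

(* Filtering any byte parser: parsed results now carry the refinement is_even. *)
let parse_even (#k: parser_kind) (p: parser k U8.t)
: parser (parse_filter_kind k) (parse_filter_refine is_even)
= parse_filter p is_even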
FStar.Matrix.fsti | FStar.Matrix.ji_is_transpose_of_ij | val ji_is_transpose_of_ij (m n: pos) (ij: under (m * n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) | val ji_is_transpose_of_ij (m n: pos) (ij: under (m * n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) | let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 57,
"end_line": 83,
"start_col": 0,
"start_line": 81
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Prims.pos -> n: Prims.pos -> ij: FStar.IntegerIntervals.under (m * n)
-> FStar.Pervasives.Lemma
(ensures FStar.Matrix.transpose_ji n m (FStar.Matrix.transpose_ji m n ij) = ij) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.pos",
"FStar.IntegerIntervals.under",
"FStar.Mul.op_Star",
"FStar.Matrix.indices_transpose_lemma",
"FStar.Matrix.get_i",
"FStar.Matrix.get_j",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.b2t",
"Prims.op_Equality",
"FStar.Matrix.transpose_ji",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let ji_is_transpose_of_ij (m n: pos) (ij: under (m * n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
| indices_transpose_lemma m (get_i m n ij) (get_j m n ij) | false |
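Illustrative note (editorial, not part of any dataset record): a concrete instance of the round trip stated by `ji_is_transpose_of_ij`. For a 2x3 matrix, the flat index 1 denotes row 0, column 1; transposing gives `transpose_ji 2 3 1 = (1 % 3) * 2 + 1 / 3 = 2`, and transposing back with the dimensions swapped gives `transpose_ji 3 2 2 = (2 % 2) * 3 + 2 / 2 = 1`, recovering the original index. The module name below is hypothetical; only `FStar.Matrix` is assumed.

module TransposeRoundTripExample
open FStar.Matrix

(* Both assertions follow by unfolding transpose_ji on concrete indices;
   the lemma call in between records the general round-trip fact. *)
let _ : unit =
  assert (transpose_ji 2 3 1 = 2);
  ji_is_transpose_of_ij 2 3 1;
  assert (transpose_ji 3 2 (transpose_ji 2 3 1) = 1)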
FStar.Matrix.fsti | FStar.Matrix.is_left_distributive | val is_left_distributive : mul: FStar.Algebra.CommMonoid.Equiv.cm c eq -> add: FStar.Algebra.CommMonoid.Equiv.cm c eq
-> Prims.logical | let is_left_distributive #c #eq (mul add: CE.cm c eq) =
forall (x y z: c). mul.mult x (add.mult y z) `eq.eq` add.mult (mul.mult x y) (mul.mult x z) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 93,
"end_line": 270,
"start_col": 0,
"start_line": 269
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof of transposition transform bijectivity *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A proof that 2D indices are swapped with the transposition transform *)
let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A matrix can always be treated as a flattened seq *)
val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
(s:SB.seq c {
SB.length s=m*n /\
(forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
})
(* Indexer for a matrix *)
val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
(t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
(* Indexer for a matrix returns the correct value *)
val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
(* A matrix can always be constructed from an m*n-sized seq *)
val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
(* A type for matrices constructed via concrete generator *)
type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
(forall (i: under m) (j: under n). ijth z i j == gen i j) /\
(forall (ij: under (m*n)). (SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij)))
}
(* Monoid-based fold of a matrix treated as a flat seq *)
val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c
(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *)
val matrix_fold_equals_fold_of_seq :
(#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n)
-> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
(* A matrix constructed from given generator *)
val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n)
-> matrix_of generator
(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *)
val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
/\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
)
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
val matrix_transpose_is_permutation (#c:_) (#m #n: pos)
(generator: matrix_generator c m n)
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n))
val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_)
(#m #n: pos)
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen)))
(* The equivalence relation defined for matrices of given dimensions *)
val matrix_equiv : (#c: Type) ->
(eq: CE.equiv c) ->
(m: pos) -> (n: pos) ->
CE.equiv (matrix c m n)
(* element-wise matrix equivalence lemma *)
val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c)
(ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb)
(ensures ijth ma i j `eq.eq` ijth mb i j)
(* We can always establish matrix equivalence from element-wise equivalence *)
val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures (matrix_equiv eq m n).eq ma mb)
(*
Notice that even though we can (and will) construct CommMonoid for matrix addition,
we still publish the operations as well since as soon as we get to multiplication,
results usually have different dimensions, so it would be convenient to have both
the CommMonoid for matrix addition and the explicit addition function.
This becomes the only way with non-square matrix multiplication, since these
would not constitute a monoid to begin with.
*)
(* This version of the lemma is useful if we don't want to invoke
Classical.forall_intro_2 in a big proof to conserve resources *)
let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
(proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j)))
: Lemma ((matrix_equiv eq m n).eq ma mb)
= Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb
(* This one is the generator function for sum of matrices *)
let matrix_add_generator #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n = fun i j -> add.mult (ijth ma i j) (ijth mb i j)
(* This is the matrix sum operation given the addition CommMonoid *)
let matrix_add #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb)
= init (matrix_add_generator add ma mb)
(* Sum of matrices ijth element lemma *)
let matrix_add_ijth #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth (matrix_add add ma mb) i j == add.mult (ijth ma i j) (ijth mb i j)) = ()
(* m*n-sized matrix addition CommMonoid *)
val matrix_add_comm_monoid : (#c:Type) ->
(#eq:CE.equiv c) ->
(add: CE.cm c eq) ->
(m:pos) -> (n: pos) ->
CE.cm (matrix c m n) (matrix_equiv eq m n)
(* Sometimes we want matrix rows and columns to be accessed as sequences *)
let col #c #m #n (mx: matrix c m n) (j: under n) = SB.init m (fun (i: under m) -> ijth mx i j)
let row #c #m #n (mx: matrix c m n) (i: under m) = SB.init n (fun (j: under n) -> ijth mx i j)
(* ijth-based and row/col-based element access methods are equivalent *)
val matrix_row_col_lemma (#c:_) (#m #n:pos) (mx: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth mx i j == SB.index (row mx i) j /\ ijth mx i j == SB.index (col mx j) i)
(* This transforms a seq X={Xi} into a seq X={Xi `op` c} *)
let seq_op_const #c #eq (cm: CE.cm c eq) (s: SB.seq c) (const: c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult (SB.index s i) const)
(* Well, technically it is the same thing as above, given cm is commutative.
We will still use prefix and postfix applications separately since
sometimes provable equality (==) rather than `eq.eq` comes in handy *)
let const_op_seq #c #eq (cm: CE.cm c eq) (const: c) (s: SB.seq c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult const (SB.index s i))
(* We can get a sequence of products (or sums) from two sequences of equal length *)
let seq_of_products #c #eq (mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s})
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> SB.index s i `mul.mult` SB.index t i)
(* As trivial as it seems to be, sometimes this lemma proves to be useful, mostly because
lemma_eq_elim invocation is surprisingly costly resources-wise. *)
val seq_of_products_lemma (#c:_) (#eq:_) (mul: CE.cm c eq)
(s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s})
(r: SB.seq c { SB.equal r (SB.init (SB.length s)
(fun (i: under (SB.length s)) ->
SB.index s i `mul.mult` SB.index t i))})
: Lemma (seq_of_products mul s t == r)
(* The usual dot product of two sequences of equal lengths *)
let dot #c #eq (add mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c{SB.length t == SB.length s})
= SP.foldm_snoc add (seq_of_products mul s t)
val dot_lemma (#c:_) (#eq:_) (add mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c{SB.length t == SB.length s})
: Lemma (dot add mul s t == SP.foldm_snoc add (seq_of_products mul s t))
(* Of course, it would be best to define the matrix product as a convolution,
but we don't have all the necessary framework for that level of generality yet. *)
val matrix_mul (#c:_) (#eq:_) (#m #n #p:pos) (add mul: CE.cm c eq) (mx: matrix c m n) (my: matrix c n p)
: matrix c m p | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mul: FStar.Algebra.CommMonoid.Equiv.cm c eq -> add: FStar.Algebra.CommMonoid.Equiv.cm c eq
-> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"FStar.Algebra.CommMonoid.Equiv.cm",
"Prims.l_Forall",
"FStar.Algebra.CommMonoid.Equiv.__proj__EQ__item__eq",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult",
"Prims.logical"
] | [] | false | false | false | false | true | let is_left_distributive #c #eq (mul: CE.cm c eq) (add: CE.cm c eq) =
| forall (x: c) (y: c) (z: c).
(mul.mult x (add.mult y z)) `eq.eq` (add.mult (mul.mult x y) (mul.mult x z)) | false |
|
FStar.Matrix.fsti | FStar.Matrix.is_right_distributive | val is_right_distributive : mul: FStar.Algebra.CommMonoid.Equiv.cm c eq -> add: FStar.Algebra.CommMonoid.Equiv.cm c eq
-> Prims.logical | let is_right_distributive #c #eq (mul add: CE.cm c eq) =
forall (x y z: c). mul.mult (add.mult x y) z `eq.eq` add.mult (mul.mult x z) (mul.mult y z) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 93,
"end_line": 273,
"start_col": 0,
"start_line": 272
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof of transposition transform bijectivity *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A proof that 2D indices are swapped with the transposition transform *)
let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A matrix can always be treated as a flattened seq *)
val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
(s:SB.seq c {
SB.length s=m*n /\
(forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
})
(* Indexer for a matrix *)
val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
(t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
(* Indexer for a matrix returns the correct value *)
val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
(* A matrix can always be constructed from an m*n-sized seq *)
val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
(* A type for matrices constructed via concrete generator *)
type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
(forall (i: under m) (j: under n). ijth z i j == gen i j) /\
(forall (ij: under (m*n)). (SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij)))
}
(* Monoid-based fold of a matrix treated as a flat seq *)
val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c
(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *)
val matrix_fold_equals_fold_of_seq :
(#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n)
-> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
(* A matrix constructed from given generator *)
val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n)
-> matrix_of generator
(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *)
val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
/\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
)
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
val matrix_transpose_is_permutation (#c:_) (#m #n: pos)
(generator: matrix_generator c m n)
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n))
val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_)
(#m #n: pos)
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen)))
(* The equivalence relation defined for matrices of given dimensions *)
val matrix_equiv : (#c: Type) ->
(eq: CE.equiv c) ->
(m: pos) -> (n: pos) ->
CE.equiv (matrix c m n)
(* element-wise matrix equivalence lemma *)
val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c)
(ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb)
(ensures ijth ma i j `eq.eq` ijth mb i j)
(* We can always establish matrix equivalence from element-wise equivalence *)
val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures (matrix_equiv eq m n).eq ma mb)
(*
Notice that even though we can (and will) construct CommMonoid for matrix addition,
we still publish the operations as well since as soon as we get to multiplication,
results usually have different dimensions, so it would be convenient to have both
the CommMonoid for matrix addition and the explicit addition function.
This becomes the only way with non-square matrix multiplication, since these
would not constitute a monoid to begin with.
*)
(* This version of the lemma is useful if we don't want to invoke
Classical.forall_intro_2 in a big proof to conserve resources *)
let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
(proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j)))
: Lemma ((matrix_equiv eq m n).eq ma mb)
= Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb
(* This one is the generator function for sum of matrices *)
let matrix_add_generator #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n = fun i j -> add.mult (ijth ma i j) (ijth mb i j)
(* This is the matrix sum operation given the addition CommMonoid *)
let matrix_add #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb)
= init (matrix_add_generator add ma mb)
(* Sum of matrices ijth element lemma *)
let matrix_add_ijth #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth (matrix_add add ma mb) i j == add.mult (ijth ma i j) (ijth mb i j)) = ()
(* m*n-sized matrix addition CommMonoid *)
val matrix_add_comm_monoid : (#c:Type) ->
(#eq:CE.equiv c) ->
(add: CE.cm c eq) ->
(m:pos) -> (n: pos) ->
CE.cm (matrix c m n) (matrix_equiv eq m n)
(* Sometimes we want matrix rows and columns to be accessed as sequences *)
let col #c #m #n (mx: matrix c m n) (j: under n) = SB.init m (fun (i: under m) -> ijth mx i j)
let row #c #m #n (mx: matrix c m n) (i: under m) = SB.init n (fun (j: under n) -> ijth mx i j)
(* ijth-based and row/col-based element access methods are equivalent *)
val matrix_row_col_lemma (#c:_) (#m #n:pos) (mx: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth mx i j == SB.index (row mx i) j /\ ijth mx i j == SB.index (col mx j) i)
(* This transforms a seq X={Xi} into a seq X={Xi `op` c} *)
let seq_op_const #c #eq (cm: CE.cm c eq) (s: SB.seq c) (const: c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult (SB.index s i) const)
(* Well, technically it is the same thing as above, given cm is commutative.
We will still use prefix and postfix applications separately since
sometimes provable equality (==) rather than `eq.eq` comes in handy *)
let const_op_seq #c #eq (cm: CE.cm c eq) (const: c) (s: SB.seq c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult const (SB.index s i))
(* We can get a sequence of products (or sums) from two sequences of equal length *)
let seq_of_products #c #eq (mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s})
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> SB.index s i `mul.mult` SB.index t i)
(* As trivial as it seems to be, sometimes this lemma proves to be useful, mostly because
lemma_eq_elim invocation is surprisingly costly resources-wise. *)
val seq_of_products_lemma (#c:_) (#eq:_) (mul: CE.cm c eq)
(s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s})
(r: SB.seq c { SB.equal r (SB.init (SB.length s)
(fun (i: under (SB.length s)) ->
SB.index s i `mul.mult` SB.index t i))})
: Lemma (seq_of_products mul s t == r)
(* The usual dot product of two sequences of equal lengths *)
let dot #c #eq (add mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c{SB.length t == SB.length s})
= SP.foldm_snoc add (seq_of_products mul s t)
val dot_lemma (#c:_) (#eq:_) (add mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c{SB.length t == SB.length s})
: Lemma (dot add mul s t == SP.foldm_snoc add (seq_of_products mul s t))
(* Of course, it would be best to define the matrix product as a convolution,
but we don't have all the necessary framework for that level of generality yet. *)
val matrix_mul (#c:_) (#eq:_) (#m #n #p:pos) (add mul: CE.cm c eq) (mx: matrix c m n) (my: matrix c n p)
: matrix c m p
(* Both distributivity laws hold for matrices as shown below *)
let is_left_distributive #c #eq (mul add: CE.cm c eq) =
forall (x y z: c). mul.mult x (add.mult y z) `eq.eq` add.mult (mul.mult x y) (mul.mult x z) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mul: FStar.Algebra.CommMonoid.Equiv.cm c eq -> add: FStar.Algebra.CommMonoid.Equiv.cm c eq
-> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"FStar.Algebra.CommMonoid.Equiv.cm",
"Prims.l_Forall",
"FStar.Algebra.CommMonoid.Equiv.__proj__EQ__item__eq",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult",
"Prims.logical"
] | [] | false | false | false | false | true | let is_right_distributive #c #eq (mul: CE.cm c eq) (add: CE.cm c eq) =
| forall (x: c) (y: c) (z: c).
(mul.mult (add.mult x y) z) `eq.eq` (add.mult (mul.mult x z) (mul.mult y z)) | false |
|
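A hypothetical client sketch for the is_right_distributive predicate above: the quantifier can be eliminated at concrete operands. The module and lemma names are illustrative, and the empty proof assumes the SMT solver instantiates the hypothesis at x, y, z; in practice an explicit hint may be needed.

module RightDistributivity.Example
module CE = FStar.Algebra.CommMonoid.Equiv
open FStar.Matrix

let right_distributivity_elim #c #eq (mul add: CE.cm c eq) (x y z: c)
  : Lemma (requires is_right_distributive mul add)
          (ensures  mul.mult (add.mult x y) z `eq.eq`
                    add.mult (mul.mult x z) (mul.mult y z))
  = () (* expected to follow by instantiating the quantified hypothesis *)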
FStar.Matrix.fsti | FStar.Matrix.dot | val dot : add: FStar.Algebra.CommMonoid.Equiv.cm c eq ->
mul: FStar.Algebra.CommMonoid.Equiv.cm c eq ->
s: FStar.Seq.Base.seq c ->
t: FStar.Seq.Base.seq c {FStar.Seq.Base.length t == FStar.Seq.Base.length s}
-> c | let dot #c #eq (add mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c{SB.length t == SB.length s})
= SP.foldm_snoc add (seq_of_products mul s t) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 47,
"end_line": 258,
"start_col": 0,
"start_line": 257
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof of transposition transform bijectivity *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A proof that 2D indices are swapped with the transposition transform *)
let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A matrix can always be treated as a flattened seq *)
val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
(s:SB.seq c {
SB.length s=m*n /\
(forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
})
(* Indexer for a matrix *)
val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
(t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
(* Indexer for a matrix returns the correct value *)
val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
(* A matrix can always be constructed from an m*n-sized seq *)
val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
(* A type for matrices constructed via concrete generator *)
type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
(forall (i: under m) (j: under n). ijth z i j == gen i j) /\
(forall (ij: under (m*n)). (SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij)))
}
(* Monoid-based fold of a matrix treated as a flat seq *)
val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c
(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *)
val matrix_fold_equals_fold_of_seq :
(#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n)
-> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
(* A matrix constructed from given generator *)
val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n)
-> matrix_of generator
(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *)
val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
/\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
)
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
val matrix_transpose_is_permutation (#c:_) (#m #n: pos)
(generator: matrix_generator c m n)
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n))
val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_)
(#m #n: pos)
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen)))
(* The equivalence relation defined for matrices of given dimensions *)
val matrix_equiv : (#c: Type) ->
(eq: CE.equiv c) ->
(m: pos) -> (n: pos) ->
CE.equiv (matrix c m n)
(* element-wise matrix equivalence lemma *)
val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c)
(ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb)
(ensures ijth ma i j `eq.eq` ijth mb i j)
(* We can always establish matrix equivalence from element-wise equivalence *)
val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures (matrix_equiv eq m n).eq ma mb)
(*
Notice that even though we can (and will) construct CommMonoid for matrix addition,
we still publish the operations as well since as soon as we get to multiplication,
results usually have different dimensions, so it would be convenient to have both
the CommMonoid for matrix addition and the explicit addition function.
This becomes the only way with non-square matrix multiplication, since these
would not constitute a monoid to begin with.
*)
(* This version of the lemma is useful if we don't want to invoke
Classical.forall_intro_2 in a big proof to conserve resources *)
let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
(proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j)))
: Lemma ((matrix_equiv eq m n).eq ma mb)
= Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb
(* This one is the generator function for sum of matrices *)
let matrix_add_generator #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n = fun i j -> add.mult (ijth ma i j) (ijth mb i j)
(* This is the matrix sum operation given the addition CommMonoid *)
let matrix_add #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb)
= init (matrix_add_generator add ma mb)
(* Sum of matrices ijth element lemma *)
let matrix_add_ijth #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth (matrix_add add ma mb) i j == add.mult (ijth ma i j) (ijth mb i j)) = ()
(* m*n-sized matrix addition CommMonoid *)
val matrix_add_comm_monoid : (#c:Type) ->
(#eq:CE.equiv c) ->
(add: CE.cm c eq) ->
(m:pos) -> (n: pos) ->
CE.cm (matrix c m n) (matrix_equiv eq m n)
(* Sometimes we want matrix rows and columns to be accessed as sequences *)
let col #c #m #n (mx: matrix c m n) (j: under n) = SB.init m (fun (i: under m) -> ijth mx i j)
let row #c #m #n (mx: matrix c m n) (i: under m) = SB.init n (fun (j: under n) -> ijth mx i j)
(* ijth-based and row/col-based element access methods are equivalent *)
val matrix_row_col_lemma (#c:_) (#m #n:pos) (mx: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth mx i j == SB.index (row mx i) j /\ ijth mx i j == SB.index (col mx j) i)
(* This transforms a seq X={Xi} into a seq X={Xi `op` c} *)
let seq_op_const #c #eq (cm: CE.cm c eq) (s: SB.seq c) (const: c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult (SB.index s i) const)
(* Well, technically it is the same thing as above, given cm is commutative.
We will still use prefix and postfix applications separately since
sometimes provable equality (==) rather than `eq.eq` comes in handy *)
let const_op_seq #c #eq (cm: CE.cm c eq) (const: c) (s: SB.seq c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult const (SB.index s i))
(* We can get a sequence of products (or sums) from two sequences of equal length *)
let seq_of_products #c #eq (mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s})
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> SB.index s i `mul.mult` SB.index t i)
(* As trivial as it seems to be, sometimes this lemma proves to be useful, mostly because
lemma_eq_elim invocation is surprisingly costly resources-wise. *)
val seq_of_products_lemma (#c:_) (#eq:_) (mul: CE.cm c eq)
(s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s})
(r: SB.seq c { SB.equal r (SB.init (SB.length s)
(fun (i: under (SB.length s)) ->
SB.index s i `mul.mult` SB.index t i))})
: Lemma (seq_of_products mul s t == r) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
add: FStar.Algebra.CommMonoid.Equiv.cm c eq ->
mul: FStar.Algebra.CommMonoid.Equiv.cm c eq ->
s: FStar.Seq.Base.seq c ->
t: FStar.Seq.Base.seq c {FStar.Seq.Base.length t == FStar.Seq.Base.length s}
-> c | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"FStar.Algebra.CommMonoid.Equiv.cm",
"FStar.Seq.Base.seq",
"Prims.eq2",
"Prims.nat",
"FStar.Seq.Base.length",
"FStar.Seq.Permutation.foldm_snoc",
"FStar.Matrix.seq_of_products"
] | [] | false | false | false | false | false | let dot
#c
#eq
(add: CE.cm c eq)
(mul: CE.cm c eq)
(s: SB.seq c)
(t: SB.seq c {SB.length t == SB.length s})
=
| SP.foldm_snoc add (seq_of_products mul s t) | false |
|
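The dot operation above folds the pointwise products of two equal-length sequences. A hypothetical client sketch follows, pairing dot with the row and col helpers to compute the quantity one would expect as the (i, j) entry of a matrix product; whether matrix_mul is specified exactly this way is not shown in this excerpt. Module and function names are illustrative, and the equal-length side condition is expected to follow from the length of SB.init.

module Dot.Example
module CE = FStar.Algebra.CommMonoid.Equiv
open FStar.IntegerIntervals
open FStar.Matrix

(* Dot product of the i-th row of mx with the j-th column of my;
   both sequences have length n, which meets the refinement on dot. *)
let row_col_dot #c #eq (#m #n #p: pos) (add mul: CE.cm c eq)
  (mx: matrix c m n) (my: matrix c n p) (i: under m) (j: under p) : c
  = dot add mul (row mx i) (col my j)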
FStar.Matrix.fsti | FStar.Matrix.is_fully_distributive | val is_fully_distributive : mul: FStar.Algebra.CommMonoid.Equiv.cm c eq -> add: FStar.Algebra.CommMonoid.Equiv.cm c eq
-> Prims.logical | let is_fully_distributive #c #eq (mul add: CE.cm c eq) = is_left_distributive mul add /\ is_right_distributive mul add | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 118,
"end_line": 275,
"start_col": 0,
"start_line": 275
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof of transposition transform bijectivity *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A proof that 2D indices are swapped with the transposition transform *)
let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A matrix can always be treated as a flattened seq *)
val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
(s:SB.seq c {
SB.length s=m*n /\
(forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
})
(* Indexer for a matrix *)
val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
(t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
(* Indexer for a matrix returns the correct value *)
val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
(* A matrix can always be constructed from an m*n-sized seq *)
val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
(* A type for matrices constructed via concrete generator *)
type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
(forall (i: under m) (j: under n). ijth z i j == gen i j) /\
(forall (ij: under (m*n)). (SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij)))
}
(* Monoid-based fold of a matrix treated as a flat seq *)
val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c
(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *)
val matrix_fold_equals_fold_of_seq :
(#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n)
-> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
(* A matrix constructed from given generator *)
val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n)
-> matrix_of generator
(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *)
val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
/\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
)
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
val matrix_transpose_is_permutation (#c:_) (#m #n: pos)
(generator: matrix_generator c m n)
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n))
val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_)
(#m #n: pos)
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen)))
(* The equivalence relation defined for matrices of given dimensions *)
val matrix_equiv : (#c: Type) ->
(eq: CE.equiv c) ->
(m: pos) -> (n: pos) ->
CE.equiv (matrix c m n)
(* element-wise matrix equivalence lemma *)
val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c)
(ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb)
(ensures ijth ma i j `eq.eq` ijth mb i j)
(* We can always establish matrix equivalence from element-wise equivalence *)
val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures (matrix_equiv eq m n).eq ma mb)
(*
Notice that even though we can (and will) construct CommMonoid for matrix addition,
we still publish the operations as well since as soon as we get to multiplication,
results usually have different dimensions, so it would be convenient to have both
the CommMonoid for matrix addition and the explicit addition function.
This becomes the only way with non-square matrix multiplication, since these
would not constitute a monoid to begin with.
*)
(* This version of the lemma is useful if we don't want to invoke
Classical.forall_intro_2 in a big proof to conserve resources *)
let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
(proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j)))
: Lemma ((matrix_equiv eq m n).eq ma mb)
= Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb
(* This one is the generator function for sum of matrices *)
let matrix_add_generator #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n = fun i j -> add.mult (ijth ma i j) (ijth mb i j)
(* This is the matrix sum operation given the addition CommMonoid *)
let matrix_add #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb)
= init (matrix_add_generator add ma mb)
(* Sum of matrices ijth element lemma *)
let matrix_add_ijth #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth (matrix_add add ma mb) i j == add.mult (ijth ma i j) (ijth mb i j)) = ()
(* m*n-sized matrix addition CommMonoid *)
val matrix_add_comm_monoid : (#c:Type) ->
(#eq:CE.equiv c) ->
(add: CE.cm c eq) ->
(m:pos) -> (n: pos) ->
CE.cm (matrix c m n) (matrix_equiv eq m n)
(* Sometimes we want matrix rows and columns to be accessed as sequences *)
let col #c #m #n (mx: matrix c m n) (j: under n) = SB.init m (fun (i: under m) -> ijth mx i j)
let row #c #m #n (mx: matrix c m n) (i: under m) = SB.init n (fun (j: under n) -> ijth mx i j)
(* ijth-based and row/col-based element access methods are equivalent *)
val matrix_row_col_lemma (#c:_) (#m #n:pos) (mx: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth mx i j == SB.index (row mx i) j /\ ijth mx i j == SB.index (col mx j) i)
(* This transforms a seq X={Xi} into a seq X={Xi `op` c} *)
let seq_op_const #c #eq (cm: CE.cm c eq) (s: SB.seq c) (const: c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult (SB.index s i) const)
(* Well, technically it is the same thing as above, given cm is commutative.
We will still use prefix and postfix applications separately since
sometimes provable equality (==) rather than `eq.eq` comes in handy *)
let const_op_seq #c #eq (cm: CE.cm c eq) (const: c) (s: SB.seq c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult const (SB.index s i))
(* We can get a sequence of products (or sums) from two sequences of equal length *)
let seq_of_products #c #eq (mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s})
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> SB.index s i `mul.mult` SB.index t i)
(* As trivial as it seems to be, sometimes this lemma proves to be useful, mostly because
lemma_eq_elim invocation is surprisingly costly resources-wise. *)
val seq_of_products_lemma (#c:_) (#eq:_) (mul: CE.cm c eq)
(s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s})
(r: SB.seq c { SB.equal r (SB.init (SB.length s)
(fun (i: under (SB.length s)) ->
SB.index s i `mul.mult` SB.index t i))})
: Lemma (seq_of_products mul s t == r)
(* The usual dot product of two sequences of equal lengths *)
let dot #c #eq (add mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c{SB.length t == SB.length s})
= SP.foldm_snoc add (seq_of_products mul s t)
val dot_lemma (#c:_) (#eq:_) (add mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c{SB.length t == SB.length s})
: Lemma (dot add mul s t == SP.foldm_snoc add (seq_of_products mul s t))
(* Of course, it would be best to define the matrix product as a convolution,
but we don't have all the necessary framework for that level of generality yet. *)
val matrix_mul (#c:_) (#eq:_) (#m #n #p:pos) (add mul: CE.cm c eq) (mx: matrix c m n) (my: matrix c n p)
: matrix c m p
(* Both distributivity laws hold for matrices as shown below *)
let is_left_distributive #c #eq (mul add: CE.cm c eq) =
forall (x y z: c). mul.mult x (add.mult y z) `eq.eq` add.mult (mul.mult x y) (mul.mult x z)
let is_right_distributive #c #eq (mul add: CE.cm c eq) =
forall (x y z: c). mul.mult (add.mult x y) z `eq.eq` add.mult (mul.mult x z) (mul.mult y z) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mul: FStar.Algebra.CommMonoid.Equiv.cm c eq -> add: FStar.Algebra.CommMonoid.Equiv.cm c eq
-> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"FStar.Algebra.CommMonoid.Equiv.cm",
"Prims.l_and",
"FStar.Matrix.is_left_distributive",
"FStar.Matrix.is_right_distributive",
"Prims.logical"
] | [] | false | false | false | false | true | let is_fully_distributive #c #eq (mul: CE.cm c eq) (add: CE.cm c eq) =
| is_left_distributive mul add /\ is_right_distributive mul add | false |
|
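Since is_fully_distributive above is by definition the conjunction of the two one-sided laws, a client can pass between the predicates with an essentially definitional lemma. The sketch below uses hypothetical module and lemma names; the empty proof is expected to go through by unfolding the definitions.

module FullDistributivity.Example
module CE = FStar.Algebra.CommMonoid.Equiv
open FStar.Matrix

let full_distributivity_intro #c #eq (mul add: CE.cm c eq)
  : Lemma (requires is_left_distributive mul add /\ is_right_distributive mul add)
          (ensures  is_fully_distributive mul add)
  = ()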
FStar.Matrix.fsti | FStar.Matrix.dual_indices | val dual_indices (m n: pos) (ij: under (m * n))
: Lemma
((get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij)) | val dual_indices (m n: pos) (ij: under (m * n))
: Lemma
((get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij)) | let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 59,
"end_line": 90,
"start_col": 0,
"start_line": 86
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof of transposition transform bijectivity *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Prims.pos -> n: Prims.pos -> ij: FStar.IntegerIntervals.under (m * n)
-> FStar.Pervasives.Lemma
(ensures
FStar.Matrix.get_j n m (FStar.Matrix.transpose_ji m n ij) = FStar.Matrix.get_i m n ij /\
FStar.Matrix.get_i n m (FStar.Matrix.transpose_ji m n ij) = FStar.Matrix.get_j m n ij) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.pos",
"FStar.IntegerIntervals.under",
"FStar.Mul.op_Star",
"FStar.Matrix.indices_transpose_lemma",
"FStar.Matrix.get_i",
"FStar.Matrix.get_j",
"Prims.unit",
"FStar.Matrix.consistency_of_ij",
"Prims.l_True",
"Prims.squash",
"Prims.l_and",
"Prims.b2t",
"Prims.op_Equality",
"FStar.Matrix.transpose_ji",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let dual_indices (m n: pos) (ij: under (m * n))
: Lemma
((get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij)) =
| consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij) | false |
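dual_indices above says that transposing a flat index swaps its row and column coordinates. A concrete hypothetical sketch for a 2x3 matrix: flat index 1 is the entry at row 0, column 1; transpose_ji 2 3 sends it to flat index 2, which decodes to row 1, column 0 in the 3x2 layout. The module name is illustrative, and since the assertions involve only literal arithmetic they are expected to discharge directly.

module Transpose.Example
open FStar.Matrix

let _ = assert (get_i 2 3 1 = 0 /\ get_j 2 3 1 = 1)  (* row 0, column 1 in the 2x3 layout *)
let _ = assert (transpose_ji 2 3 1 = 2)              (* lands at flat index 1 * 2 + 0 = 2 *)
let _ = assert (get_i 3 2 2 = 1 /\ get_j 3 2 2 = 0)  (* row 1, column 0 in the 3x2 layout *)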
FStar.Matrix.fsti | FStar.Matrix.is_absorber | val is_absorber : z: c -> op: FStar.Algebra.CommMonoid.Equiv.cm c eq -> Prims.logical | let is_absorber #c #eq (z:c) (op: CE.cm c eq) =
forall (x:c). op.mult z x `eq.eq` z /\ op.mult x z `eq.eq` z | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 62,
"end_line": 289,
"start_col": 0,
"start_line": 288
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof of transposition transform bijectivity *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A proof that 2D indices are swapped with the transposition transform *)
let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A matrix can always be treated as a flattened seq *)
val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
(s:SB.seq c {
SB.length s=m*n /\
(forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
})
(* Indexer for a matrix *)
val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
(t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
(* Indexer for a matrix returns the correct value *)
val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
(* A matrix can always be constructed from an m*n-sized seq *)
val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
(* A type for matrices constructed via concrete generator *)
type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
(forall (i: under m) (j: under n). ijth z i j == gen i j) /\
(forall (ij: under (m*n)). (SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij)))
}
(* Monoid-based fold of a matrix treated as a flat seq *)
val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c
(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *)
val matrix_fold_equals_fold_of_seq :
(#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n)
-> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
(* A matrix constructed from given generator *)
val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n)
-> matrix_of generator
(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *)
val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
/\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
)
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
val matrix_transpose_is_permutation (#c:_) (#m #n: pos)
(generator: matrix_generator c m n)
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n))
val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_)
(#m #n: pos)
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen)))
(* The equivalence relation defined for matrices of given dimensions *)
val matrix_equiv : (#c: Type) ->
(eq: CE.equiv c) ->
(m: pos) -> (n: pos) ->
CE.equiv (matrix c m n)
(* element-wise matrix equivalence lemma *)
val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c)
(ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb)
(ensures ijth ma i j `eq.eq` ijth mb i j)
(* We can always establish matrix equivalence from element-wise equivalence *)
val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures (matrix_equiv eq m n).eq ma mb)
(*
Notice that even though we can (and will) construct CommMonoid for matrix addition,
we still publish the operations as well since as soon as we get to multiplication,
results usually have different dimensions, so it would be convenient to have both
the CommMonoid for matrix addition and the explicit addition function.
This becomes the only way with non-square matrix multiplication, since these
would not constitute a monoid to begin with.
*)
(* This version of the lemma is useful if we don't want to invoke
Classical.forall_intro_2 in a big proof to conserve resources *)
let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
(proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j)))
: Lemma ((matrix_equiv eq m n).eq ma mb)
= Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb
(* This one is the generator function for sum of matrices *)
let matrix_add_generator #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n = fun i j -> add.mult (ijth ma i j) (ijth mb i j)
(* This is the matrix sum operation given the addition CommMonoid *)
let matrix_add #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb)
= init (matrix_add_generator add ma mb)
(* Sum of matrices ijth element lemma *)
let matrix_add_ijth #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth (matrix_add add ma mb) i j == add.mult (ijth ma i j) (ijth mb i j)) = ()
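(* Usage sketch (int_eq and int_add_cm, an integer addition CommMonoid, are
   only assumed here for illustration and are not defined in this file):
   for ma mb : matrix int 2 2, `matrix_add int_add_cm ma mb` is their
   pointwise sum, and matrix_add_ijth gives
   ijth (matrix_add int_add_cm ma mb) i j == ijth ma i j + ijth mb i j. *)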
(* m*n-sized matrix addition CommMonoid *)
val matrix_add_comm_monoid : (#c:Type) ->
(#eq:CE.equiv c) ->
(add: CE.cm c eq) ->
(m:pos) -> (n: pos) ->
CE.cm (matrix c m n) (matrix_equiv eq m n)
(* Sometimes we want matrix rows and columns to be accessed as sequences *)
let col #c #m #n (mx: matrix c m n) (j: under n) = SB.init m (fun (i: under m) -> ijth mx i j)
let row #c #m #n (mx: matrix c m n) (i: under m) = SB.init n (fun (j: under n) -> ijth mx i j)
(* ijth-based and row/col-based element access methods are equivalent *)
val matrix_row_col_lemma (#c:_) (#m #n:pos) (mx: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth mx i j == SB.index (row mx i) j /\ ijth mx i j == SB.index (col mx j) i)
(* This transforms a seq X={Xi} into a seq X={Xi `op` c} *)
let seq_op_const #c #eq (cm: CE.cm c eq) (s: SB.seq c) (const: c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult (SB.index s i) const)
(* Well, technically it is the same thing as above, given cm is commutative.
We will still use prefix and postfix applications separately since
sometimes provable equality (==) rather than `eq.eq` comes in handy *)
let const_op_seq #c #eq (cm: CE.cm c eq) (const: c) (s: SB.seq c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult const (SB.index s i))
(* We can get a sequence of products (or sums) from two sequences of equal length *)
let seq_of_products #c #eq (mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s})
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> SB.index s i `mul.mult` SB.index t i)
(* As trivial as it seems to be, sometimes this lemma proves to be useful, mostly because
lemma_eq_elim invocation is surprisingly costly resources-wise. *)
val seq_of_products_lemma (#c:_) (#eq:_) (mul: CE.cm c eq)
(s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s})
(r: SB.seq c { SB.equal r (SB.init (SB.length s)
(fun (i: under (SB.length s)) ->
SB.index s i `mul.mult` SB.index t i))})
: Lemma (seq_of_products mul s t == r)
(* The usual dot product of two sequences of equal lengths *)
let dot #c #eq (add mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c{SB.length t == SB.length s})
= SP.foldm_snoc add (seq_of_products mul s t)
val dot_lemma (#c:_) (#eq:_) (add mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c{SB.length t == SB.length s})
: Lemma (dot add mul s t == SP.foldm_snoc add (seq_of_products mul s t))
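(* Worked reading (with hypothetical integer addition and multiplication
   monoids int_add and int_mul): for s = [1;2;3] and t = [4;5;6],
   seq_of_products int_mul s t = [4;10;18], so
   dot int_add int_mul s t folds to 4 + 10 + 18 = 32 -- the usual dot product. *)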
(* Of course, it would be best to define the matrix product as a convolution,
but we don't have all the necessary framework for that level of generality yet. *)
val matrix_mul (#c:_) (#eq:_) (#m #n #p:pos) (add mul: CE.cm c eq) (mx: matrix c m n) (my: matrix c n p)
: matrix c m p
(* Both distributivity laws hold for matrices as shown below *)
let is_left_distributive #c #eq (mul add: CE.cm c eq) =
forall (x y z: c). mul.mult x (add.mult y z) `eq.eq` add.mult (mul.mult x y) (mul.mult x z)
let is_right_distributive #c #eq (mul add: CE.cm c eq) =
forall (x y z: c). mul.mult (add.mult x y) z `eq.eq` add.mult (mul.mult x z) (mul.mult y z)
let is_fully_distributive #c #eq (mul add: CE.cm c eq) = is_left_distributive mul add /\ is_right_distributive mul add
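(* Concrete reading: with integer multiplication as mul and addition as add,
   left distributivity is the familiar x*(y+z) = x*y + x*z, right
   distributivity is (x+y)*z = x*z + y*z, and is_fully_distributive
   requires both at once. *)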
(*
This definition is of course far more general than matrices, and should rather
be a part of algebra core, as it is relevant to any magma.
In the process of development of F* abstract algebra framework, this definition
will probably take its rightful place near the most basic of grouplike structures.
Also note that this property is defined via forall. We would probably want
to make such properties opaque to SMT in the future, to avoid verification performance
issues. | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | z: c -> op: FStar.Algebra.CommMonoid.Equiv.cm c eq -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"FStar.Algebra.CommMonoid.Equiv.cm",
"Prims.l_Forall",
"Prims.l_and",
"FStar.Algebra.CommMonoid.Equiv.__proj__EQ__item__eq",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult",
"Prims.logical"
] | [] | false | false | false | false | true | let is_absorber #c #eq (z: c) (op: CE.cm c eq) =
| forall (x: c). (op.mult z x) `eq.eq` z /\ (op.mult x z) `eq.eq` z | false |
|
FStar.Matrix.fsti | FStar.Matrix.matrix_equiv_from_proof | val matrix_equiv_from_proof
(#c: _)
(#m #n: pos)
(eq: CE.equiv c)
(ma mb: matrix c m n)
(proof: (i: under m -> j: under n -> Lemma (eq.eq (ijth ma i j) (ijth mb i j))))
: Lemma ((matrix_equiv eq m n).eq ma mb) | val matrix_equiv_from_proof
(#c: _)
(#m #n: pos)
(eq: CE.equiv c)
(ma mb: matrix c m n)
(proof: (i: under m -> j: under n -> Lemma (eq.eq (ijth ma i j) (ijth mb i j))))
: Lemma ((matrix_equiv eq m n).eq ma mb) | let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
(proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j)))
: Lemma ((matrix_equiv eq m n).eq ma mb)
= Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 41,
"end_line": 200,
"start_col": 0,
"start_line": 196
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof that the transposition transform is bijective *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A proof that 2D indices are swapped by the transposition transform *)
let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A matrix can always be treated as a flattened seq *)
val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
(s:SB.seq c {
SB.length s=m*n /\
(forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
})
(* Indexer for a matrix *)
val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
(t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
(* Indexer for a matrix returns the correct value *)
val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
(* A matrix can always be constructed from an m*n-sized seq *)
val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
(* A type for matrices constructed via concrete generator *)
type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
(forall (i: under m) (j: under n). ijth z i j == gen i j) /\
(forall (ij: under (m*n)). (SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij)))
}
(* Monoid-based fold of a matrix treated as a flat seq *)
val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c
(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *)
val matrix_fold_equals_fold_of_seq :
(#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n)
-> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
(* A matrix constructed from given generator *)
val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n)
-> matrix_of generator
(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *)
val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
/\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
)
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
val matrix_transpose_is_permutation (#c:_) (#m #n: pos)
(generator: matrix_generator c m n)
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n))
val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_)
(#m #n: pos)
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen)))
(* The equivalence relation defined for matrices of given dimensions *)
val matrix_equiv : (#c: Type) ->
(eq: CE.equiv c) ->
(m: pos) -> (n: pos) ->
CE.equiv (matrix c m n)
(* element-wise matrix equivalence lemma *)
val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c)
(ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb)
(ensures ijth ma i j `eq.eq` ijth mb i j)
(* We can always establish matrix equivalence from element-wise equivalence *)
val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures (matrix_equiv eq m n).eq ma mb)
(*
Notice that even though we can (and will) construct CommMonoid for matrix addition,
we still publish the operations as well since as soon as we get to multiplication,
results usually have different dimensions, so it would be convenient to have both
the CommMonoid for matrix addition and the explicit addition function.
This becomes the only way with non-square matrix multiplication, since these
would not constitute a monoid to begin with.
*)
(* This version of the lemma is useful if we don't want to invoke | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
eq: FStar.Algebra.CommMonoid.Equiv.equiv c ->
ma: FStar.Matrix.matrix c m n ->
mb: FStar.Matrix.matrix c m n ->
proof:
(i: FStar.IntegerIntervals.under m -> j: FStar.IntegerIntervals.under n
-> FStar.Pervasives.Lemma
(ensures EQ?.eq eq (FStar.Matrix.ijth ma i j) (FStar.Matrix.ijth mb i j)))
-> FStar.Pervasives.Lemma (ensures EQ?.eq (FStar.Matrix.matrix_equiv eq m n) ma mb) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.pos",
"FStar.Algebra.CommMonoid.Equiv.equiv",
"FStar.Matrix.matrix",
"FStar.IntegerIntervals.under",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"FStar.Algebra.CommMonoid.Equiv.__proj__EQ__item__eq",
"FStar.Matrix.ijth",
"Prims.Nil",
"FStar.Pervasives.pattern",
"FStar.Matrix.matrix_equiv_from_element_eq",
"FStar.Classical.forall_intro_2",
"FStar.Matrix.matrix_equiv"
] | [] | false | false | true | false | false | let matrix_equiv_from_proof
#c
(#m: pos)
(#n: pos)
(eq: CE.equiv c)
(ma: matrix c m n)
(mb: matrix c m n)
(proof: (i: under m -> j: under n -> Lemma (eq.eq (ijth ma i j) (ijth mb i j))))
: Lemma ((matrix_equiv eq m n).eq ma mb) =
| Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb | false |
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.tot_parse_tagged_union | val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
)) | val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
)) | let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 64,
"end_line": 269,
"start_col": 0,
"start_line": 267
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
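(* Usage sketch (parse_u8, msg_tag_of_data and msg_payload are hypothetical
   names, assumed only for illustration): a format consisting of a one-byte
   tag followed by a tag-dependent payload would be parsed by
   parse_tagged_union parse_u8 msg_tag_of_data (fun t -> msg_payload t),
   i.e. the tag parser runs first and its result selects the payload parser. *)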
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
pt: LowParse.Spec.Base.tot_parser kt tag_t ->
tag_of_data: (_: data_t -> tag_t) ->
p:
(t: tag_t
-> LowParse.Spec.Base.tot_parser k (LowParse.Spec.Base.refine_with_tag tag_of_data t))
-> Prims.Pure
(LowParse.Spec.Base.tot_parser (LowParse.Spec.Combinators.and_then_kind kt k) data_t) | Prims.Pure | [] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_parser",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Combinators.tot_and_then",
"LowParse.Spec.Combinators.tot_parse_tagged_union_payload",
"Prims.unit",
"LowParse.Spec.Combinators.parse_tagged_union_payload_and_then_cases_injective",
"LowParse.Spec.Combinators.and_then_kind"
] | [] | false | false | false | false | false | let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
| parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` (tot_parse_tagged_union_payload tag_of_data p) | false |
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.tot_parse_filter | val tot_parse_filter
(#k: parser_kind)
(#t: Type)
(p: tot_parser k t)
(f: (t -> Tot bool))
: Pure (tot_parser (parse_filter_kind k) (parse_filter_refine f))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_filter #k p f) x
)) | val tot_parse_filter
(#k: parser_kind)
(#t: Type)
(p: tot_parser k t)
(f: (t -> Tot bool))
: Pure (tot_parser (parse_filter_kind k) (parse_filter_refine f))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_filter #k p f) x
)) | let tot_parse_filter
#k #t p f
= p `tot_and_then` (tot_parse_filter_payload f) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 47,
"end_line": 693,
"start_col": 0,
"start_line": 691
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
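(* Illustration (parse_u8 is assumed here for the sake of the example):
   nondep_then parse_u8 parse_u8 run on the two bytes 0x01 0x02 first parses
   0x01, then parses 0x02 on the remaining slice, and returns
   Some ((1uy, 2uy), 2) -- exactly the shape spelled out by nondep_then_eq
   below. *)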
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ())
let serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
= ()
let length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
= ()
let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
= serialize_nondep_then_upd_left s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s1' = serialize s1 (fst x) in
let l1 = Seq.length s1' in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) s1' (serialize s2 (snd x));
seq_upd_seq_right_to_left s 0 s1' i' s';
seq_upd_seq_slice_idem s 0 (Seq.length s1')
let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
= serialize_nondep_then_upd_left s1 s2 x y
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
= let j' = Seq.length (serialize s1 (fst x)) - i' - Seq.length s' in
serialize_nondep_then_upd_left_chain s1 s2 x y j' s';
assert (j' == Seq.length (serialize (serialize_nondep_then s1 s2) x) - (Seq.length (serialize s2 (snd x)) + i') - Seq.length s')
let serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_right s (serialize s2 y);
let l2 = Seq.length s - Seq.length (serialize s2 (snd x)) in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2) (Seq.slice s l2 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
= serialize_nondep_then_upd_right s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s2' = serialize s2 (snd x) in
let l2 = Seq.length s - Seq.length s2' in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2) (Seq.slice s l2 (Seq.length s)) (serialize s1 (fst x)) s2';
seq_upd_seq_right_to_left s l2 s2' i' s';
seq_upd_seq_slice_idem s l2 (Seq.length s)
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let make_total_constant_size_parser_compose
(sz: nat)
(t1 t2: Type)
(f1: ((s: bytes {Seq.length s == sz}) -> GTot t1))
(g2: t1 -> GTot t2)
: Lemma
(requires (
make_total_constant_size_parser_precond sz t1 f1 /\
(forall x x' . g2 x == g2 x' ==> x == x')
))
(ensures (
make_total_constant_size_parser_precond sz t1 f1 /\
make_total_constant_size_parser_precond sz t2 (f1 `compose` g2) /\
(forall x x' . {:pattern (g2 x); (g2 x')} g2 x == g2 x' ==> x == x') /\
(forall input . {:pattern (parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input)} parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input == parse (make_total_constant_size_parser sz t1 f1 `parse_synth` g2) input)
))
= ()
let parse_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
: Tot (parser (parse_filter_kind k) (parse_filter_refine f))
= p `and_then` (parse_filter_payload f)
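(* Usage sketch (parse_u8 is only assumed for illustration):
   parse_filter parse_u8 (fun x -> U8.rem x 2uy = 0uy) parses one byte and
   keeps it only if it is even; as parse_filter_eq below states, the filtered
   parser succeeds exactly when the underlying parser succeeds and the
   predicate holds on its result. *)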
let parse_filter_eq
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
(input: bytes)
: Lemma
(parse (parse_filter p f) input == (match parse p input with
| None -> None
| Some (x, consumed) ->
if f x
then Some (x, consumed)
else None
))
= ()
let tot_parse_filter_payload
(#t: Type)
(f: (t -> Tot bool))
(v: t)
: Tot (tot_parser parse_filter_payload_kind (parse_filter_refine f))
= let p : tot_bare_parser (parse_filter_refine f) =
if f v
then
let v' : (x: t { f x == true } ) = v in
tot_weaken parse_filter_payload_kind (tot_parse_ret v')
else tot_fail_parser parse_filter_payload_kind (parse_filter_refine f)
in
parser_kind_prop_equiv parse_filter_payload_kind p;
p | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Spec.Base.tot_parser k t -> f: (_: t -> Prims.bool)
-> Prims.Pure
(LowParse.Spec.Base.tot_parser (LowParse.Spec.Combinators.parse_filter_kind k)
(LowParse.Spec.Combinators.parse_filter_refine f)) | Prims.Pure | [] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_parser",
"Prims.bool",
"LowParse.Spec.Combinators.tot_and_then",
"LowParse.Spec.Combinators.parse_filter_payload_kind",
"LowParse.Spec.Combinators.parse_filter_refine",
"LowParse.Spec.Combinators.tot_parse_filter_payload",
"LowParse.Spec.Combinators.parse_filter_kind"
] | [] | false | false | false | false | false | let tot_parse_filter #k #t p f =
| p `tot_and_then` (tot_parse_filter_payload f) | false |
FStar.Matrix.fsti | FStar.Matrix.col | val col : mx: FStar.Matrix.matrix c m n -> j: FStar.IntegerIntervals.under n -> FStar.Seq.Base.seq c | let col #c #m #n (mx: matrix c m n) (j: under n) = SB.init m (fun (i: under m) -> ijth mx i j) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 94,
"end_line": 224,
"start_col": 0,
"start_line": 224
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof of transposition transform bijectivity *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A proof that 2D indices are swapped with the transposition transform *)
let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A matrix can always be treated as a flattened seq *)
val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
(s:SB.seq c {
SB.length s=m*n /\
(forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
})
(* Indexer for a matrix *)
val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
(t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
(* Indexer for a matrix returns the correct value *)
val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
(* A matrix can always be constructed from an m*n-sized seq *)
val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
(* A type for matrices constructed via concrete generator *)
type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
(forall (i: under m) (j: under n). ijth z i j == gen i j) /\
(forall (ij: under (m*n)). (SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij)))
}
(* Monoid-based fold of a matrix treated as a flat seq *)
val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c
(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *)
val matrix_fold_equals_fold_of_seq :
(#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n)
-> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
(* A matrix constructed from given generator *)
val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n)
-> matrix_of generator
(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *)
val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
/\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
)
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
val matrix_transpose_is_permutation (#c:_) (#m #n: pos)
(generator: matrix_generator c m n)
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n))
val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_)
(#m #n: pos)
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen)))
(* The equivalence relation defined for matrices of given dimensions *)
val matrix_equiv : (#c: Type) ->
(eq: CE.equiv c) ->
(m: pos) -> (n: pos) ->
CE.equiv (matrix c m n)
(* element-wise matrix equivalence lemma *)
val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c)
(ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb)
(ensures ijth ma i j `eq.eq` ijth mb i j)
(* We can always establish matrix equivalence from element-wise equivalence *)
val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures (matrix_equiv eq m n).eq ma mb)
(*
Notice that even though we can (and will) construct CommMonoid for matrix addition,
we still publish the operations as well since as soon as we get to multiplication,
results usually have different dimensions, so it would be convenient to have both
the CommMonoid for matrix addition and the explicit addition function.
This becomes the only way with non-square matrix multiplication, since these
would not constitute a monoid to begin with.
*)
(* This version of the lemma is useful if we don't want to invoke
Classical.forall_intro_2 in a big proof to conserve resources *)
let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
(proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j)))
: Lemma ((matrix_equiv eq m n).eq ma mb)
= Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb
(* This one is the generator function for sum of matrices *)
let matrix_add_generator #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n = fun i j -> add.mult (ijth ma i j) (ijth mb i j)
(* This is the matrix sum operation given the addition CommMonoid *)
let matrix_add #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb)
= init (matrix_add_generator add ma mb)
(* Sum of matrices ijth element lemma *)
let matrix_add_ijth #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth (matrix_add add ma mb) i j == add.mult (ijth ma i j) (ijth mb i j)) = ()
(* m*n-sized matrix addition CommMonoid *)
val matrix_add_comm_monoid : (#c:Type) ->
(#eq:CE.equiv c) ->
(add: CE.cm c eq) ->
(m:pos) -> (n: pos) ->
CE.cm (matrix c m n) (matrix_equiv eq m n) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mx: FStar.Matrix.matrix c m n -> j: FStar.IntegerIntervals.under n -> FStar.Seq.Base.seq c | Prims.Tot | [
"total"
] | [] | [
"Prims.pos",
"FStar.Matrix.matrix",
"FStar.IntegerIntervals.under",
"FStar.Seq.Base.init",
"FStar.Matrix.ijth",
"FStar.Seq.Base.seq"
] | [] | false | false | false | false | false | let col #c #m #n (mx: matrix c m n) (j: under n) =
| SB.init m (fun (i: under m) -> ijth mx i j) | false |
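Editor's note: the next few lines are an illustrative F* sketch, not part of the dataset row above. It only uses names quoted in this row and in the later FStar.Matrix rows below (init, col, ijth, matrix_row_col_lemma); the 2x2 integer generator is a made-up example.
let example_gen : matrix_generator int 2 2 = fun i j -> i + j      // hypothetical generator
let example_mx = init example_gen                                  // 2x2 matrix built from it
let example_col_ok (i: under 2) (j: under 2)
  : Lemma (ijth example_mx i j == SB.index (col example_mx j) i)   // reading an entry via its column
  = matrix_row_col_lemma example_mx i j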
|
FStar.Matrix.fsti | FStar.Matrix.matrix_add_generator | val matrix_add_generator (#c #eq: _) (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n | val matrix_add_generator (#c #eq: _) (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n | let matrix_add_generator #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n = fun i j -> add.mult (ijth ma i j) (ijth mb i j) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 76,
"end_line": 204,
"start_col": 0,
"start_line": 203
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof of transposition transform bijectivity *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A proof that 2D indices are swapped with the transposition transform *)
let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A matrix can always be treated as a flattened seq *)
val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
(s:SB.seq c {
SB.length s=m*n /\
(forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
})
(* Indexer for a matrix *)
val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
(t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
(* Indexer for a matrix returns the correct value *)
val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
(* A matrix can always be constructed from an m*n-sized seq *)
val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
(* A type for matrices constructed via concrete generator *)
type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
(forall (i: under m) (j: under n). ijth z i j == gen i j) /\
(forall (ij: under (m*n)). (SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij)))
}
(* Monoid-based fold of a matrix treated as a flat seq *)
val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c
(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *)
val matrix_fold_equals_fold_of_seq :
(#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n)
-> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
(* A matrix constructed from given generator *)
val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n)
-> matrix_of generator
(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *)
val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
/\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
)
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
val matrix_transpose_is_permutation (#c:_) (#m #n: pos)
(generator: matrix_generator c m n)
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n))
val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_)
(#m #n: pos)
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen)))
(* The equivalence relation defined for matrices of given dimensions *)
val matrix_equiv : (#c: Type) ->
(eq: CE.equiv c) ->
(m: pos) -> (n: pos) ->
CE.equiv (matrix c m n)
(* element-wise matrix equivalence lemma *)
val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c)
(ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb)
(ensures ijth ma i j `eq.eq` ijth mb i j)
(* We can always establish matrix equivalence from element-wise equivalence *)
val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures (matrix_equiv eq m n).eq ma mb)
(*
Notice that even though we can (and will) construct CommMonoid for matrix addition,
we still publish the operations as well since as soon as we get to multiplication,
results usually have different dimensions, so it would be convenient to have both
the CommMonoid for matrix addition and the explicit addition function.
This becomes the only way with non-square matrix multiplication, since these
would not constitute a monoid to begin with.
*)
(* This version of the lemma is useful if we don't want to invoke
Classical.forall_intro_2 in a big proof to conserve resources *)
let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
(proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j)))
: Lemma ((matrix_equiv eq m n).eq ma mb)
= Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
add: FStar.Algebra.CommMonoid.Equiv.cm c eq ->
ma: FStar.Matrix.matrix c m n ->
mb: FStar.Matrix.matrix c m n
-> FStar.Matrix.matrix_generator c m n | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"Prims.pos",
"FStar.Algebra.CommMonoid.Equiv.cm",
"FStar.Matrix.matrix",
"FStar.IntegerIntervals.under",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult",
"FStar.Matrix.ijth",
"FStar.Matrix.matrix_generator"
] | [] | false | false | false | false | false | let matrix_add_generator
#c
#eq
(#m: pos)
(#n: pos)
(add: CE.cm c eq)
(ma: matrix c m n)
(mb: matrix c m n)
: matrix_generator c m n =
| fun i j -> add.mult (ijth ma i j) (ijth mb i j) | false |
FStar.Matrix.fsti | FStar.Matrix.const_op_seq | val const_op_seq : cm: FStar.Algebra.CommMonoid.Equiv.cm c eq -> const: c -> s: FStar.Seq.Base.seq c
-> FStar.Seq.Base.seq c | let const_op_seq #c #eq (cm: CE.cm c eq) (const: c) (s: SB.seq c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult const (SB.index s i)) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 88,
"end_line": 240,
"start_col": 0,
"start_line": 239
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof of transposition transform bijectivity *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A proof that 2D indices are swapped with the transposition transform *)
let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A matrix can always be treated as a flattened seq *)
val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
(s:SB.seq c {
SB.length s=m*n /\
(forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
})
(* Indexer for a matrix *)
val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
(t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
(* Indexer for a matrix returns the correct value *)
val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
(* A matrix can always be constructed from an m*n-sized seq *)
val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
(* A type for matrices constructed via concrete generator *)
type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
(forall (i: under m) (j: under n). ijth z i j == gen i j) /\
(forall (ij: under (m*n)). (SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij)))
}
(* Monoid-based fold of a matrix treated as a flat seq *)
val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c
(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *)
val matrix_fold_equals_fold_of_seq :
(#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n)
-> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
(* A matrix constructed from given generator *)
val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n)
-> matrix_of generator
(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *)
val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
/\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
)
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
val matrix_transpose_is_permutation (#c:_) (#m #n: pos)
(generator: matrix_generator c m n)
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n))
val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_)
(#m #n: pos)
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen)))
(* The equivalence relation defined for matrices of given dimensions *)
val matrix_equiv : (#c: Type) ->
(eq: CE.equiv c) ->
(m: pos) -> (n: pos) ->
CE.equiv (matrix c m n)
(* element-wise matrix equivalence lemma *)
val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c)
(ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb)
(ensures ijth ma i j `eq.eq` ijth mb i j)
(* We can always establish matrix equivalence from element-wise equivalence *)
val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures (matrix_equiv eq m n).eq ma mb)
(*
Notice that even though we can (and will) construct CommMonoid for matrix addition,
we still publish the operations as well since as soon as we get to multiplication,
results usually have different dimensions, so it would be convenient to have both
the CommMonoid for matrix addition and the explicit addition function.
This becomes the only way with non-square matrix multiplication, since these
would not constitute a monoid to begin with.
*)
(* This version of the lemma is useful if we don't want to invoke
Classical.forall_intro_2 in a big proof to conserve resources *)
let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
(proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j)))
: Lemma ((matrix_equiv eq m n).eq ma mb)
= Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb
(* This one is the generator function for sum of matrices *)
let matrix_add_generator #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n = fun i j -> add.mult (ijth ma i j) (ijth mb i j)
(* This is the matrix sum operation given the addition CommMonoid *)
let matrix_add #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb)
= init (matrix_add_generator add ma mb)
(* Sum of matrices ijth element lemma *)
let matrix_add_ijth #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth (matrix_add add ma mb) i j == add.mult (ijth ma i j) (ijth mb i j)) = ()
(* m*n-sized matrix addition CommMonoid *)
val matrix_add_comm_monoid : (#c:Type) ->
(#eq:CE.equiv c) ->
(add: CE.cm c eq) ->
(m:pos) -> (n: pos) ->
CE.cm (matrix c m n) (matrix_equiv eq m n)
(* Sometimes we want matrix rows and columns to be accessed as sequences *)
let col #c #m #n (mx: matrix c m n) (j: under n) = SB.init m (fun (i: under m) -> ijth mx i j)
let row #c #m #n (mx: matrix c m n) (i: under m) = SB.init n (fun (j: under n) -> ijth mx i j)
(* ijth-based and row/col-based element access methods are equivalent *)
val matrix_row_col_lemma (#c:_) (#m #n:pos) (mx: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth mx i j == SB.index (row mx i) j /\ ijth mx i j == SB.index (col mx j) i)
(* This transforms a seq X={Xi} into a seq X={Xi `op` c} *)
let seq_op_const #c #eq (cm: CE.cm c eq) (s: SB.seq c) (const: c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult (SB.index s i) const)
(* Well, technically it is the same thing as above, given cm is commutative.
We will still use prefix and postfix applications separately since | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | cm: FStar.Algebra.CommMonoid.Equiv.cm c eq -> const: c -> s: FStar.Seq.Base.seq c
-> FStar.Seq.Base.seq c | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"FStar.Algebra.CommMonoid.Equiv.cm",
"FStar.Seq.Base.seq",
"FStar.Seq.Base.init",
"FStar.Seq.Base.length",
"FStar.IntegerIntervals.under",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult",
"FStar.Seq.Base.index"
] | [] | false | false | false | false | false | let const_op_seq #c #eq (cm: CE.cm c eq) (const: c) (s: SB.seq c) =
| SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult const (SB.index s i)) | false |
|
FStar.Matrix.fsti | FStar.Matrix.seq_op_const | val seq_op_const : cm: FStar.Algebra.CommMonoid.Equiv.cm c eq -> s: FStar.Seq.Base.seq c -> const: c
-> FStar.Seq.Base.seq c | let seq_op_const #c #eq (cm: CE.cm c eq) (s: SB.seq c) (const: c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult (SB.index s i) const) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 88,
"end_line": 234,
"start_col": 0,
"start_line": 233
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof of transposition transform bijectivity *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A proof that 2D indices are swapped with the transposition transform *)
let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A matrix can always be treated as a flattened seq *)
val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
(s:SB.seq c {
SB.length s=m*n /\
(forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
})
(* Indexer for a matrix *)
val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
(t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
(* Indexer for a matrix returns the correct value *)
val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
(* A matrix can always be constructed from an m*n-sized seq *)
val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
(* A type for matrices constructed via concrete generator *)
type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
(forall (i: under m) (j: under n). ijth z i j == gen i j) /\
(forall (ij: under (m*n)). (SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij)))
}
(* Monoid-based fold of a matrix treated as a flat seq *)
val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c
(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *)
val matrix_fold_equals_fold_of_seq :
(#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n)
-> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
(* A matrix constructed from given generator *)
val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n)
-> matrix_of generator
(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *)
val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
/\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
)
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
val matrix_transpose_is_permutation (#c:_) (#m #n: pos)
(generator: matrix_generator c m n)
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n))
val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_)
(#m #n: pos)
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen)))
(* The equivalence relation defined for matrices of given dimensions *)
val matrix_equiv : (#c: Type) ->
(eq: CE.equiv c) ->
(m: pos) -> (n: pos) ->
CE.equiv (matrix c m n)
(* element-wise matrix equivalence lemma *)
val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c)
(ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb)
(ensures ijth ma i j `eq.eq` ijth mb i j)
(* We can always establish matrix equivalence from element-wise equivalence *)
val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures (matrix_equiv eq m n).eq ma mb)
(*
Notice that even though we can (and will) construct CommMonoid for matrix addition,
we still publish the operations as well since as soon as we get to multiplication,
results usually have different dimensions, so it would be convenient to have both
the CommMonoid for matrix addition and the explicit addition function.
This becomes the only way with non-square matrix multiplication, since these
would not constitute a monoid to begin with.
*)
(* This version of the lemma is useful if we don't want to invoke
Classical.forall_intro_2 in a big proof to conserve resources *)
let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
(proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j)))
: Lemma ((matrix_equiv eq m n).eq ma mb)
= Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb
(* This one is the generator function for sum of matrices *)
let matrix_add_generator #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n = fun i j -> add.mult (ijth ma i j) (ijth mb i j)
(* This is the matrix sum operation given the addition CommMonoid *)
let matrix_add #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb)
= init (matrix_add_generator add ma mb)
(* Sum of matrices ijth element lemma *)
let matrix_add_ijth #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth (matrix_add add ma mb) i j == add.mult (ijth ma i j) (ijth mb i j)) = ()
(* m*n-sized matrix addition CommMonoid *)
val matrix_add_comm_monoid : (#c:Type) ->
(#eq:CE.equiv c) ->
(add: CE.cm c eq) ->
(m:pos) -> (n: pos) ->
CE.cm (matrix c m n) (matrix_equiv eq m n)
(* Sometimes we want matrix rows and columns to be accessed as sequences *)
let col #c #m #n (mx: matrix c m n) (j: under n) = SB.init m (fun (i: under m) -> ijth mx i j)
let row #c #m #n (mx: matrix c m n) (i: under m) = SB.init n (fun (j: under n) -> ijth mx i j)
(* ijth-based and row/col-based element access methods are equivalent *)
val matrix_row_col_lemma (#c:_) (#m #n:pos) (mx: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth mx i j == SB.index (row mx i) j /\ ijth mx i j == SB.index (col mx j) i) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | cm: FStar.Algebra.CommMonoid.Equiv.cm c eq -> s: FStar.Seq.Base.seq c -> const: c
-> FStar.Seq.Base.seq c | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"FStar.Algebra.CommMonoid.Equiv.cm",
"FStar.Seq.Base.seq",
"FStar.Seq.Base.init",
"FStar.Seq.Base.length",
"FStar.IntegerIntervals.under",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult",
"FStar.Seq.Base.index"
] | [] | false | false | false | false | false | let seq_op_const #c #eq (cm: CE.cm c eq) (s: SB.seq c) (const: c) =
| SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult (SB.index s i) const) | false |
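Editor's note: a minimal sketch contrasting the two helpers quoted in the rows above; it adds nothing beyond argument order. seq_op_const applies the constant on the right of each element, const_op_seq on the left; the wrapper names below are made up.
let scale_right #c #eq (cm: CE.cm c eq) (s: SB.seq c) (k: c) : SB.seq c = seq_op_const cm s k
let scale_left  #c #eq (cm: CE.cm c eq) (k: c) (s: SB.seq c) : SB.seq c = const_op_seq cm k s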
|
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.valid0 | val valid0 : Prims.bool | let valid0 = true | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 39,
"end_line": 31,
"start_col": 22,
"start_line": 31
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.bool | Prims.Tot | [
"total"
] | [] | [] | [] | false | false | false | true | false | let valid0 =
| true | false |
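Editor's note: illustrative only. A hypothetical record type bundling the Curve25519 test-vector pieces quoted above (private_0, public0, result0, valid0); the type and its field names are assumptions, not part of the library.
type curve25519_vector = {
  scalar: B.buffer UInt8.t;   // corresponds to private_0
  point:  B.buffer UInt8.t;   // corresponds to public0
  result: B.buffer UInt8.t;   // corresponds to result0
  valid:  bool;               // corresponds to valid0
}
let vector0 = { scalar = private_0; point = public0; result = result0; valid = valid0 }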
|
FStar.Matrix.fsti | FStar.Matrix.row | val row : mx: FStar.Matrix.matrix c m n -> i: FStar.IntegerIntervals.under m -> FStar.Seq.Base.seq c | let row #c #m #n (mx: matrix c m n) (i: under m) = SB.init n (fun (j: under n) -> ijth mx i j) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 94,
"end_line": 226,
"start_col": 0,
"start_line": 226
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof of transposition transform bijectivity *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A proof that 2D indices are swapped with the transposition transform *)
let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A matrix can always be treated as a flattened seq *)
val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
(s:SB.seq c {
SB.length s=m*n /\
(forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
})
(* Indexer for a matrix *)
val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
(t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
(* Indexer for a matrix returns the correct value *)
val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
(* A matrix can always be constructed from an m*n-sized seq *)
val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
(* A type for matrices constructed via concrete generator *)
type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
(forall (i: under m) (j: under n). ijth z i j == gen i j) /\
(forall (ij: under (m*n)). (SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij)))
}
(* Monoid-based fold of a matrix treated as a flat seq *)
val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c
(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *)
val matrix_fold_equals_fold_of_seq :
(#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n)
-> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
(* A matrix constructed from given generator *)
val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n)
-> matrix_of generator
(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *)
val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
/\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
)
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
val matrix_transpose_is_permutation (#c:_) (#m #n: pos)
(generator: matrix_generator c m n)
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n))
val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_)
(#m #n: pos)
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen)))
(* The equivalence relation defined for matrices of given dimensions *)
val matrix_equiv : (#c: Type) ->
(eq: CE.equiv c) ->
(m: pos) -> (n: pos) ->
CE.equiv (matrix c m n)
(* element-wise matrix equivalence lemma *)
val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c)
(ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb)
(ensures ijth ma i j `eq.eq` ijth mb i j)
(* We can always establish matrix equivalence from element-wise equivalence *)
val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures (matrix_equiv eq m n).eq ma mb)
(*
Notice that even though we can (and will) construct CommMonoid for matrix addition,
we still publish the operations as well since as soon as we get to multiplication,
results usually have different dimensions, so it would be convenient to have both
the CommMonoid for matrix addition and the explicit addition function.
This becomes the only way with non-square matrix multiplication, since these
would not constitute a monoid to begin with.
*)
(* This version of the lemma is useful if we don't want to invoke
Classical.forall_intro_2 in a big proof to conserve resources *)
let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
(proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j)))
: Lemma ((matrix_equiv eq m n).eq ma mb)
= Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb
(* This one is the generator function for sum of matrices *)
let matrix_add_generator #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n = fun i j -> add.mult (ijth ma i j) (ijth mb i j)
(* This is the matrix sum operation given the addition CommMonoid *)
let matrix_add #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb)
= init (matrix_add_generator add ma mb)
(* Sum of matrices ijth element lemma *)
let matrix_add_ijth #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth (matrix_add add ma mb) i j == add.mult (ijth ma i j) (ijth mb i j)) = ()
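(* Illustrative usage sketch, not part of the original interface (the name
   matrix_add_self_ijth is hypothetical): instantiating the lemma above with
   mb = ma shows that adding a matrix to itself combines each entry with
   itself under add.mult. *)
let matrix_add_self_ijth #c #eq (#m #n: pos) (add: CE.cm c eq) (ma: matrix c m n) (i: under m) (j: under n)
  : Lemma (ijth (matrix_add add ma ma) i j == add.mult (ijth ma i j) (ijth ma i j))
  = matrix_add_ijth add ma ma i j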
(* m*n-sized matrix addition CommMonoid *)
val matrix_add_comm_monoid : (#c:Type) ->
(#eq:CE.equiv c) ->
(add: CE.cm c eq) ->
(m:pos) -> (n: pos) ->
CE.cm (matrix c m n) (matrix_equiv eq m n)
(* Sometimes we want matrix rows and columns to be accessed as sequences *)
let col #c #m #n (mx: matrix c m n) (j: under n) = SB.init m (fun (i: under m) -> ijth mx i j) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mx: FStar.Matrix.matrix c m n -> i: FStar.IntegerIntervals.under m -> FStar.Seq.Base.seq c | Prims.Tot | [
"total"
] | [] | [
"Prims.pos",
"FStar.Matrix.matrix",
"FStar.IntegerIntervals.under",
"FStar.Seq.Base.init",
"FStar.Matrix.ijth",
"FStar.Seq.Base.seq"
] | [] | false | false | false | false | false | let row #c #m #n (mx: matrix c m n) (i: under m) =
| SB.init n (fun (j: under n) -> ijth mx i j) | false |
|
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.valid1 | val valid1 : Prims.bool | let valid1 = true | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 39,
"end_line": 57,
"start_col": 22,
"start_line": 57
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.bool | Prims.Tot | [
"total"
] | [] | [] | [] | false | false | false | true | false | let valid1 =
| true | false |
|
Hacl.Test.HMAC_DRBG.fst | Hacl.Test.HMAC_DRBG.vectors_tmp | val vectors_tmp : Prims.list (((((((Spec.HMAC_DRBG.Test.Vectors.supported_alg * Test.Lowstarize.hex_encoded) *
Test.Lowstarize.hex_encoded) *
Test.Lowstarize.hex_encoded) *
Test.Lowstarize.hex_encoded) *
Test.Lowstarize.hex_encoded) *
(Test.Lowstarize.hex_encoded * Test.Lowstarize.hex_encoded)) *
Test.Lowstarize.hex_encoded) | let vectors_tmp = List.Tot.map
(fun x -> x.a, h x.entropy_input, h x.nonce, h x.personalization_string,
h x.entropy_input_reseed, h x.additional_input_reseed,
(h x.additional_input_1, h x.additional_input_2),
h x.returned_bits)
test_vectors | {
"file_name": "code/tests/Hacl.Test.HMAC_DRBG.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 24,
"start_col": 0,
"start_line": 19
} | module Hacl.Test.HMAC_DRBG
open FStar.HyperStack.ST
open Test.Lowstarize
open Lib.IntTypes
open Hacl.HMAC_DRBG
open Spec.HMAC_DRBG.Test.Vectors
module D = Spec.Hash.Definitions
module L = Test.Lowstarize
module B = LowStar.Buffer
#set-options "--fuel 0 --ifuel 0 --z3rlimit 100"
(* FStar.Reflection only supports up to 8-tuples *) | {
"checked_file": "/",
"dependencies": [
"Test.Lowstarize.fst.checked",
"Spec.HMAC_DRBG.Test.Vectors.fst.checked",
"Spec.HMAC_DRBG.fsti.checked",
"Spec.Hash.Definitions.fst.checked",
"prims.fst.checked",
"LowStar.Printf.fst.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Hacl.HMAC_DRBG.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked",
"C.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Test.HMAC_DRBG.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "Spec.Hash.Definitions",
"short_module": "D"
},
{
"abbrev": false,
"full_module": "Spec.HMAC_DRBG.Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.HMAC_DRBG",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.list (((((((Spec.HMAC_DRBG.Test.Vectors.supported_alg * Test.Lowstarize.hex_encoded) *
Test.Lowstarize.hex_encoded) *
Test.Lowstarize.hex_encoded) *
Test.Lowstarize.hex_encoded) *
Test.Lowstarize.hex_encoded) *
(Test.Lowstarize.hex_encoded * Test.Lowstarize.hex_encoded)) *
Test.Lowstarize.hex_encoded) | Prims.Tot | [
"total"
] | [] | [
"FStar.List.Tot.Base.map",
"Spec.HMAC_DRBG.Test.Vectors.vec",
"FStar.Pervasives.Native.tuple8",
"Spec.HMAC_DRBG.Test.Vectors.supported_alg",
"Test.Lowstarize.hex_encoded",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.Mktuple8",
"Spec.HMAC_DRBG.Test.Vectors.__proj__Mkvec__item__a",
"Test.Lowstarize.h",
"Spec.HMAC_DRBG.Test.Vectors.__proj__Mkvec__item__entropy_input",
"Spec.HMAC_DRBG.Test.Vectors.__proj__Mkvec__item__nonce",
"Spec.HMAC_DRBG.Test.Vectors.__proj__Mkvec__item__personalization_string",
"Spec.HMAC_DRBG.Test.Vectors.__proj__Mkvec__item__entropy_input_reseed",
"Spec.HMAC_DRBG.Test.Vectors.__proj__Mkvec__item__additional_input_reseed",
"FStar.Pervasives.Native.Mktuple2",
"Spec.HMAC_DRBG.Test.Vectors.__proj__Mkvec__item__additional_input_1",
"Spec.HMAC_DRBG.Test.Vectors.__proj__Mkvec__item__additional_input_2",
"Spec.HMAC_DRBG.Test.Vectors.__proj__Mkvec__item__returned_bits",
"Spec.HMAC_DRBG.Test.Vectors.test_vectors"
] | [] | false | false | false | true | false | let vectors_tmp =
| List.Tot.map (fun x ->
x.a,
h x.entropy_input,
h x.nonce,
h x.personalization_string,
h x.entropy_input_reseed,
h x.additional_input_reseed,
(h x.additional_input_1, h x.additional_input_2),
h x.returned_bits)
test_vectors | false |
|
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.serialize_nondep_then_upd_left | val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
)) | val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
)) | let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x)) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 119,
"end_line": 485,
"start_col": 0,
"start_line": 463
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ())
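(* Illustrative note, not part of the original file: serialize_nondep_then s1 s2
   serializes a pair componentwise, so if serialize s1 x1 yields the byte
   sequence b1 and serialize s2 x2 yields b2, then the pair (x1, x2) is intended
   to serialize to b1 followed by b2 (Seq.append b1 b2);
   serialize_nondep_then_eq below makes this precise via
   bare_serialize_nondep_then. *)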
let serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
= ()
let length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
= () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s1:
LowParse.Spec.Base.serializer p1
{ Mkparser_kind'?.parser_kind_subkind k1 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } ->
s2: LowParse.Spec.Base.serializer p2 ->
x: (t1 * t2) ->
y: t1
-> FStar.Pervasives.Lemma
(requires
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 y) ==
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 (FStar.Pervasives.Native.fst x)))
(ensures
(let s =
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2) x
in
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 y) <= FStar.Seq.Base.length s /\
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2)
(y,
FStar.Pervasives.Native.snd x) ==
LowParse.Spec.Base.seq_upd_seq s 0 (LowParse.Spec.Base.serialize s1 y))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"FStar.Pervasives.Native.tuple2",
"FStar.Seq.Properties.lemma_append_inj",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"LowParse.Spec.Base.serialize",
"FStar.Pervasives.Native.fst",
"FStar.Pervasives.Native.snd",
"Prims.unit",
"FStar.Seq.Properties.lemma_split",
"Prims.nat",
"LowParse.Spec.Base.seq_upd_seq_left",
"LowParse.Bytes.bytes",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.nondep_then",
"LowParse.Spec.Combinators.serialize_nondep_then",
"Prims.squash",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Seq.Base.seq",
"FStar.Pervasives.Native.Mktuple2",
"LowParse.Spec.Base.seq_upd_seq",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma (requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures
(let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y))) =
| let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1)
(Seq.slice s l1 (Seq.length s))
(serialize s1 (fst x))
(serialize s2 (snd x)) | false |
FStar.Matrix.fsti | FStar.Matrix.matrix_mul_unit | val matrix_mul_unit (#c #eq: _) (add mul: CE.cm c eq) (m: _) : matrix c m m | val matrix_mul_unit (#c #eq: _) (add mul: CE.cm c eq) (m: _) : matrix c m m | let matrix_mul_unit #c #eq (add mul: CE.cm c eq) m
: matrix c m m = init (fun i j -> if i=j then mul.unit else add.unit) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 71,
"end_line": 327,
"start_col": 0,
"start_line": 326
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof that the transposition transform is bijective *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A proof that 2D indices are swapped by the transposition transform *)
let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A matrix can always be treated as a flattened seq *)
val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
(s:SB.seq c {
SB.length s=m*n /\
(forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
})
(* Indexer for a matrix *)
val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
(t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
(* Indexer for a matrix returns the correct value *)
val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
(* A matrix can always be constructed from an m*n-sized seq *)
val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
(* A type for matrices constructed via concrete generator *)
type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
(forall (i: under m) (j: under n). ijth z i j == gen i j) /\
(forall (ij: under (m*n)). (SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij)))
}
(* Monoid-based fold of a matrix treated as a flat seq *)
val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c
(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *)
val matrix_fold_equals_fold_of_seq :
(#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n)
-> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
(* A matrix constructed from given generator *)
val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n)
-> matrix_of generator
(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *)
val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
/\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
)
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
val matrix_transpose_is_permutation (#c:_) (#m #n: pos)
(generator: matrix_generator c m n)
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n))
val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_)
(#m #n: pos)
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen)))
(* The equivalence relation defined for matrices of given dimensions *)
val matrix_equiv : (#c: Type) ->
(eq: CE.equiv c) ->
(m: pos) -> (n: pos) ->
CE.equiv (matrix c m n)
(* element-wise matrix equivalence lemma *)
val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c)
(ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb)
(ensures ijth ma i j `eq.eq` ijth mb i j)
(* We can always establish matrix equivalence from element-wise equivalence *)
val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures (matrix_equiv eq m n).eq ma mb)
(*
Notice that even though we can (and will) construct CommMonoid for matrix addition,
we still publish the operations as well since as soon as we get to multiplication,
results usually have different dimensions, so it would be convenient to have both
the CommMonoid for matrix addition and the explicit addition function.
This becomes the only way with non-square matrix multiplication, since these
would not constitute a monoid to begin with.
*)
(* This version of the lemma is useful if we don't want to invoke
Classical.forall_intro_2 in a big proof to conserve resources *)
let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
(proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j)))
: Lemma ((matrix_equiv eq m n).eq ma mb)
= Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb
(* This one is the generator function for sum of matrices *)
let matrix_add_generator #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n = fun i j -> add.mult (ijth ma i j) (ijth mb i j)
(* This is the matrix sum operation given the addition CommMonoid *)
let matrix_add #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb)
= init (matrix_add_generator add ma mb)
(* Sum of matrices ijth element lemma *)
let matrix_add_ijth #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth (matrix_add add ma mb) i j == add.mult (ijth ma i j) (ijth mb i j)) = ()
(* m*n-sized matrix addition CommMonoid *)
val matrix_add_comm_monoid : (#c:Type) ->
(#eq:CE.equiv c) ->
(add: CE.cm c eq) ->
(m:pos) -> (n: pos) ->
CE.cm (matrix c m n) (matrix_equiv eq m n)
(* Sometimes we want matrix rows and columns to be accessed as sequences *)
let col #c #m #n (mx: matrix c m n) (j: under n) = SB.init m (fun (i: under m) -> ijth mx i j)
let row #c #m #n (mx: matrix c m n) (i: under m) = SB.init n (fun (j: under n) -> ijth mx i j)
(* ijth-based and row/col-based element access methods are equivalent *)
val matrix_row_col_lemma (#c:_) (#m #n:pos) (mx: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth mx i j == SB.index (row mx i) j /\ ijth mx i j == SB.index (col mx j) i)
(* This transforms a seq X={Xi} into a seq X={Xi `op` c} *)
let seq_op_const #c #eq (cm: CE.cm c eq) (s: SB.seq c) (const: c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult (SB.index s i) const)
(* Well, technically it is the same thing as above, given cm is commutative.
We will still use prefix and postfix applications separately since
sometimes provable equality (==) rather than `eq.eq` comes in handy *)
let const_op_seq #c #eq (cm: CE.cm c eq) (const: c) (s: SB.seq c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult const (SB.index s i))
(* We can get a sequence of products (or sums) from two sequences of equal length *)
let seq_of_products #c #eq (mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s})
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> SB.index s i `mul.mult` SB.index t i)
(* As trivial as it seems to be, sometimes this lemma proves to be useful, mostly because
lemma_eq_elim invocation is surprisingly costly resources-wise. *)
val seq_of_products_lemma (#c:_) (#eq:_) (mul: CE.cm c eq)
(s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s})
(r: SB.seq c { SB.equal r (SB.init (SB.length s)
(fun (i: under (SB.length s)) ->
SB.index s i `mul.mult` SB.index t i))})
: Lemma (seq_of_products mul s t == r)
(* The usual dot product of two sequences of equal lengths *)
let dot #c #eq (add mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c{SB.length t == SB.length s})
= SP.foldm_snoc add (seq_of_products mul s t)
val dot_lemma (#c:_) (#eq:_) (add mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c{SB.length t == SB.length s})
: Lemma (dot add mul s t == SP.foldm_snoc add (seq_of_products mul s t))
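(* Illustrative note, not part of the original interface: for two-element
   sequences s = [s0; s1] and t = [t0; t1], seq_of_products mul s t is
   [s0 `mul.mult` t0; s1 `mul.mult` t1], so dot add mul s t folds that sequence
   under add -- the familiar dot product s0*t0 + s1*t1, up to eq and the
   trailing add.unit introduced by foldm_snoc. *)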
(* Of course, it would be best to define the matrix product as a convolution,
but we don't have all the necessary framework for that level of generality yet. *)
val matrix_mul (#c:_) (#eq:_) (#m #n #p:pos) (add mul: CE.cm c eq) (mx: matrix c m n) (my: matrix c n p)
: matrix c m p
(* Both distributivity laws hold for matrices as shown below *)
let is_left_distributive #c #eq (mul add: CE.cm c eq) =
forall (x y z: c). mul.mult x (add.mult y z) `eq.eq` add.mult (mul.mult x y) (mul.mult x z)
let is_right_distributive #c #eq (mul add: CE.cm c eq) =
forall (x y z: c). mul.mult (add.mult x y) z `eq.eq` add.mult (mul.mult x z) (mul.mult y z)
let is_fully_distributive #c #eq (mul add: CE.cm c eq) = is_left_distributive mul add /\ is_right_distributive mul add
(*
This definition is of course far more general than matrices, and should rather
be a part of algebra core, as it is relevant to any magma.
In the process of development of F* abstract algebra framework, this definition
will probably take its rightful place near the most basic of grouplike structures.
Also note that this property is defined via forall. We would probably want
to make such properties opaque to SMT in the future, to avoid verification performance
issues.
*)
let is_absorber #c #eq (z:c) (op: CE.cm c eq) =
forall (x:c). op.mult z x `eq.eq` z /\ op.mult x z `eq.eq` z
(*
Similar lemmas to reason about matrix product elements
We're going to refactor these a bit, as some are clearly redundant.
Might want to keep internal usages to one variant of the lemma and
remove the rest.
*)
val matrix_mul_ijth (#c:_) (#eq:_) (#m #n #k:pos) (add mul: CE.cm c eq)
(mx: matrix c m n) (my: matrix c n k) (i: under m) (h: under k)
: Lemma (ijth (matrix_mul add mul mx my) i h == dot add mul (row mx i) (col my h))
val matrix_mul_ijth_as_sum (#c:_) (#eq:_) (#m #n #p:pos) (add mul: CE.cm c eq)
(mx: matrix c m n) (my: matrix c n p) (i: under m) (k: under p)
: Lemma (ijth (matrix_mul add mul mx my) i k ==
SP.foldm_snoc add (SB.init n (fun (j: under n) -> mul.mult (ijth mx i j) (ijth my j k))))
val matrix_mul_ijth_eq_sum_of_seq (#c:_) (#eq:_) (#m #n #p:pos) (add: CE.cm c eq)
(mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul})
(mx: matrix c m n) (my: matrix c n p) (i: under m) (k: under p)
(r: SB.seq c{r `SB.equal` seq_of_products mul (row mx i) (col my k)})
: Lemma (ijth (matrix_mul add mul mx my) i k == SP.foldm_snoc add r)
val matrix_mul_ijth_eq_sum_of_seq_for_init (#c:_) (#eq:_) (#m #n #p:pos) (add mul: CE.cm c eq)
(mx: matrix c m n) (my: matrix c n p) (i: under m) (k: under p)
(f: under n -> c { SB.init n f `SB.equal` seq_of_products mul (row mx i) (col my k)})
: Lemma (ijth (matrix_mul add mul mx my) i k == SP.foldm_snoc add (SB.init n f))
(* Basically, we prove that (XY)Z = X(YZ) for any matrices of compatible sizes *)
val matrix_mul_is_associative (#c:_) (#eq:_) (#m #n #p #q: pos) (add: CE.cm c eq)
(mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul})
(mx: matrix c m n) (my: matrix c n p) (mz: matrix c p q)
: Lemma ((matrix_equiv eq m q).eq ((matrix_mul add mul mx my) `matrix_mul add mul` mz)
(matrix_mul add mul mx (matrix_mul add mul my mz))) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
add: FStar.Algebra.CommMonoid.Equiv.cm c eq ->
mul: FStar.Algebra.CommMonoid.Equiv.cm c eq ->
m: Prims.pos
-> FStar.Matrix.matrix c m m | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"FStar.Algebra.CommMonoid.Equiv.cm",
"Prims.pos",
"FStar.Matrix.init",
"FStar.IntegerIntervals.under",
"Prims.op_Equality",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit",
"Prims.bool",
"FStar.Matrix.matrix"
] | [] | false | false | false | false | false | let matrix_mul_unit #c #eq (add: CE.cm c eq) (mul: CE.cm c eq) m : matrix c m m =
| init (fun i j -> if i = j then mul.unit else add.unit) | false |
FStar.Matrix.fsti | FStar.Matrix.seq_of_products | val seq_of_products : mul: FStar.Algebra.CommMonoid.Equiv.cm c eq ->
s: FStar.Seq.Base.seq c ->
t: FStar.Seq.Base.seq c {FStar.Seq.Base.length t == FStar.Seq.Base.length s}
-> FStar.Seq.Base.seq c | let seq_of_products #c #eq (mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s})
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> SB.index s i `mul.mult` SB.index t i) | {
"file_name": "ulib/FStar.Matrix.fsti",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 96,
"end_line": 245,
"start_col": 0,
"start_line": 244
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions to work with matrices via
seqs, and define transpose transform together with theorems that assert
matrix fold equality of original and transposed matrices.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
(* This is similar to lambdas passed to FStar.Seq.Base.init *)
type matrix_generator c (m n: pos) = under m -> under n -> c
(* We hide the implementation details of a matrix. *)
val matrix (c:Type u#a) (m n : pos) : Type u#a
(* This lemma asserts the flattened index to be valid
for the flattened matrix seq *)
let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n)
: Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n)
(* Returns the flattened index from 2D indices pair
and the two array dimensions. *)
let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
= flattened_index_is_under_flattened_size m n i j; i*n + j
(* The following two functions return the matrix indices from the
flattened index and the two dimensions *)
let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
(* A proof that getting a 2D index back from the flattened
index works correctly *)
let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
: Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
flattened_index_is_under_flattened_size m n i j; //speeds up the proof
ML.lemma_mod_plus j i n;
ML.lemma_div_plus j i n
(* A proof that getting the flattened index from 2D
indices works correctly *)
let consistency_of_ij (m n: pos) (ij: under (m*n))
: Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
(* The transposition transform for the flattened index *)
let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
(get_j m n ij)*m + (get_i m n ij)
(* Auxiliary arithmetic lemma *)
let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
: Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
(* A proof that the transposition transform is bijective *)
let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
: Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A proof that 2D indices are swapped with the transposition transform *)
let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
(get_j n m (transpose_ji m n ij) = get_i m n ij) /\
(get_i n m (transpose_ji m n ij) = get_j m n ij))
= consistency_of_ij m n ij;
indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
(* A matrix can always be treated as a flattened seq *)
val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
(s:SB.seq c {
SB.length s=m*n /\
(forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
})
(* Indexer for a matrix *)
val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
(t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
(* Indexer for a matrix returns the correct value *)
val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
(* A matrix can always be constructed from an m*n-sized seq *)
val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
(* A type for matrices constructed via concrete generator *)
type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
(forall (i: under m) (j: under n). ijth z i j == gen i j) /\
(forall (ij: under (m*n)). (SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij)))
}
(* Monoid-based fold of a matrix treated as a flat seq *)
val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c
(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *)
val matrix_fold_equals_fold_of_seq :
(#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n)
-> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
(* A matrix constructed from given generator *)
val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n)
-> matrix_of generator
(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *)
val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
/\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
)
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) ->
(#m: pos) -> (#n: pos) ->
(cm: CE.cm c eq) ->
(generator: matrix_generator c m n) ->
Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
val matrix_transpose_is_permutation (#c:_) (#m #n: pos)
(generator: matrix_generator c m n)
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n))
val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_)
(#m #n: pos)
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen)))
(* The equivalence relation defined for matrices of given dimensions *)
val matrix_equiv : (#c: Type) ->
(eq: CE.equiv c) ->
(m: pos) -> (n: pos) ->
CE.equiv (matrix c m n)
(* element-wise matrix equivalence lemma *)
val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c)
(ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb)
(ensures ijth ma i j `eq.eq` ijth mb i j)
(* We can always establish matrix equivalence from element-wise equivalence *)
val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures (matrix_equiv eq m n).eq ma mb)
(*
Notice that even though we can (and will) construct CommMonoid for matrix addition,
we still publish the operations as well since as soon as we get to multiplication,
results usually have different dimensions, so it would be convenient to have both
the CommMonoid for matrix addition and the explicit addition function.
This becomes the only way with non-square matrix multiplication, since these
would not constitute a monoid to begin with.
*)
(* This version of the lemma is useful if we don't want to invoke
Classical.forall_intro_2 in a big proof to conserve resources *)
let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
(proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j)))
: Lemma ((matrix_equiv eq m n).eq ma mb)
= Classical.forall_intro_2 proof;
matrix_equiv_from_element_eq eq ma mb
(* This one is the generator function for sum of matrices *)
let matrix_add_generator #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_generator c m n = fun i j -> add.mult (ijth ma i j) (ijth mb i j)
(* This is the matrix sum operation given the addition CommMonoid *)
let matrix_add #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: matrix_of (matrix_add_generator add ma mb)
= init (matrix_add_generator add ma mb)
(* Sum of matrices ijth element lemma *)
let matrix_add_ijth #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth (matrix_add add ma mb) i j == add.mult (ijth ma i j) (ijth mb i j)) = ()
(* m*n-sized matrix addition CommMonoid *)
val matrix_add_comm_monoid : (#c:Type) ->
(#eq:CE.equiv c) ->
(add: CE.cm c eq) ->
(m:pos) -> (n: pos) ->
CE.cm (matrix c m n) (matrix_equiv eq m n)
(* Sometimes we want matrix rows and columns to be accessed as sequences *)
let col #c #m #n (mx: matrix c m n) (j: under n) = SB.init m (fun (i: under m) -> ijth mx i j)
let row #c #m #n (mx: matrix c m n) (i: under m) = SB.init n (fun (j: under n) -> ijth mx i j)
(* ijth-based and row/col-based element access methods are equivalent *)
val matrix_row_col_lemma (#c:_) (#m #n:pos) (mx: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth mx i j == SB.index (row mx i) j /\ ijth mx i j == SB.index (col mx j) i)
(* This transforms a seq X={Xi} into a seq X={Xi `op` c} *)
let seq_op_const #c #eq (cm: CE.cm c eq) (s: SB.seq c) (const: c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult (SB.index s i) const)
(* Well, technically it is the same thing as above, given cm is commutative.
We will still use prefix and postfix applications separately since
sometimes provable equality (==) rather than `eq.eq` comes in handy *)
let const_op_seq #c #eq (cm: CE.cm c eq) (const: c) (s: SB.seq c)
= SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult const (SB.index s i)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Matrix.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
mul: FStar.Algebra.CommMonoid.Equiv.cm c eq ->
s: FStar.Seq.Base.seq c ->
t: FStar.Seq.Base.seq c {FStar.Seq.Base.length t == FStar.Seq.Base.length s}
-> FStar.Seq.Base.seq c | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"FStar.Algebra.CommMonoid.Equiv.cm",
"FStar.Seq.Base.seq",
"Prims.eq2",
"Prims.nat",
"FStar.Seq.Base.length",
"FStar.Seq.Base.init",
"FStar.IntegerIntervals.under",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult",
"FStar.Seq.Base.index"
] | [] | false | false | false | false | false | let seq_of_products
#c
#eq
(mul: CE.cm c eq)
(s: SB.seq c)
(t: SB.seq c {SB.length t == SB.length s})
=
| SB.init (SB.length s) (fun (i: under (SB.length s)) -> (SB.index s i) `mul.mult` (SB.index t i)) | false |
|
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.valid2 | val valid2 : Prims.bool | let valid2 = true | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 39,
"end_line": 83,
"start_col": 22,
"start_line": 83
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.bool | Prims.Tot | [
"total"
] | [] | [] | [] | false | false | false | true | false | let valid2 =
| true | false |
|
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.valid3 | val valid3 : Prims.bool | let valid3 = true | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 39,
"end_line": 109,
"start_col": 22,
"start_line": 109
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.bool | Prims.Tot | [
"total"
] | [] | [] | [] | false | false | false | true | false | let valid3 =
| true | false |
|
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.private_1_len | val private_1_len:(x: UInt32.t{UInt32.v x = B.length private_1}) | val private_1_len:(x: UInt32.t{UInt32.v x = B.length private_1}) | let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 39,
"start_col": 22,
"start_line": 38
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.private_1} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let private_1_len:(x: UInt32.t{UInt32.v x = B.length private_1}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.public0 | val public0:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val public0:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 18,
"start_col": 0,
"start_line": 15
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let public0:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy;
0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy;
0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.private_0_len | val private_0_len:(x: UInt32.t{UInt32.v x = B.length private_0}) | val private_0_len:(x: UInt32.t{UInt32.v x = B.length private_0}) | let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 13,
"start_col": 22,
"start_line": 12
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.private_0} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let private_0_len:(x: UInt32.t{UInt32.v x = B.length private_0}) =
| 32ul | false |
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.serialize_nondep_then_upd_right | val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
)) | val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
)) | let serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_right s (serialize s2 y);
let l2 = Seq.length s - Seq.length (serialize s2 (snd x)) in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2) (Seq.slice s l2 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x)) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 119,
"end_line": 594,
"start_col": 0,
"start_line": 572
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ())
let serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
= ()
let length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
= ()
let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
= serialize_nondep_then_upd_left s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s1' = serialize s1 (fst x) in
let l1 = Seq.length s1' in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) s1' (serialize s2 (snd x));
seq_upd_seq_right_to_left s 0 s1' i' s';
seq_upd_seq_slice_idem s 0 (Seq.length s1')
let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
= serialize_nondep_then_upd_left s1 s2 x y
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
= let j' = Seq.length (serialize s1 (fst x)) - i' - Seq.length s' in
serialize_nondep_then_upd_left_chain s1 s2 x y j' s';
assert (j' == Seq.length (serialize (serialize_nondep_then s1 s2) x) - (Seq.length (serialize s2 (snd x)) + i') - Seq.length s') | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [
"smt.arith.nl=false"
],
"z3refresh": true,
"z3rlimit": 64,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s1:
LowParse.Spec.Base.serializer p1
{ Mkparser_kind'?.parser_kind_subkind k1 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } ->
s2: LowParse.Spec.Base.serializer p2 ->
x: (t1 * t2) ->
y: t2
-> FStar.Pervasives.Lemma
(requires
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s2 y) ==
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s2 (FStar.Pervasives.Native.snd x)))
(ensures
(let s =
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2) x
in
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s2 y) <= FStar.Seq.Base.length s /\
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2)
(FStar.Pervasives.Native.fst x,
y) ==
LowParse.Spec.Base.seq_upd_seq s
(FStar.Seq.Base.length s - FStar.Seq.Base.length (LowParse.Spec.Base.serialize s2 y))
(LowParse.Spec.Base.serialize s2 y))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"FStar.Pervasives.Native.tuple2",
"FStar.Seq.Properties.lemma_append_inj",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"LowParse.Spec.Base.serialize",
"FStar.Pervasives.Native.fst",
"FStar.Pervasives.Native.snd",
"Prims.unit",
"FStar.Seq.Properties.lemma_split",
"Prims.int",
"Prims.op_Subtraction",
"LowParse.Spec.Base.seq_upd_seq_right",
"LowParse.Bytes.bytes",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.nondep_then",
"LowParse.Spec.Combinators.serialize_nondep_then",
"Prims.nat",
"Prims.squash",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Seq.Base.seq",
"FStar.Pervasives.Native.Mktuple2",
"LowParse.Spec.Base.seq_upd_seq",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma (requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures
(let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) ==
seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y))) =
| let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_right s (serialize s2 y);
let l2 = Seq.length s - Seq.length (serialize s2 (snd x)) in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2)
(Seq.slice s l2 (Seq.length s))
(serialize s1 (fst x))
(serialize s2 (snd x)) | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.private_0 | val private_0:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val private_0:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 10,
"start_col": 0,
"start_line": 7
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0" | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let private_0:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy;
0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy;
0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Hacl.P256.PrecompTable.fst | Hacl.P256.PrecompTable.precomp_g_pow2_128_table_w4 | val precomp_g_pow2_128_table_w4:
x:glbuffer uint64 192ul{witnessed x precomp_g_pow2_128_table_lseq_w4 /\ recallable x} | val precomp_g_pow2_128_table_w4:
x:glbuffer uint64 192ul{witnessed x precomp_g_pow2_128_table_lseq_w4 /\ recallable x} | let precomp_g_pow2_128_table_w4:
x:glbuffer uint64 192ul{witnessed x precomp_g_pow2_128_table_lseq_w4 /\ recallable x} =
createL_global precomp_g_pow2_128_table_list_w4 | {
"file_name": "code/ecdsap256/Hacl.P256.PrecompTable.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 49,
"end_line": 255,
"start_col": 0,
"start_line": 253
} | module Hacl.P256.PrecompTable
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
module ST = FStar.HyperStack.ST
module LSeq = Lib.Sequence
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module SPT = Hacl.Spec.PrecompBaseTable
module SPT256 = Hacl.Spec.PrecompBaseTable256
module SPTK = Hacl.Spec.P256.PrecompTable
module S = Spec.P256
module SL = Spec.P256.Lemmas
open Hacl.Impl.P256.Point
include Hacl.Impl.P256.Group
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let proj_point_to_list p =
SPTK.proj_point_to_list_lemma p;
SPTK.proj_point_to_list p
let lemma_refl x =
SPTK.proj_point_to_list_lemma x
//-----------------
inline_for_extraction noextract
let proj_g_pow2_64 : S.proj_point =
[@inline_let]
let rX : S.felem = 0x000931f4ae428a4ad81ee0aa89cf5247ce85d4dd696c61b4bb9d4761e57b7fbe in
[@inline_let]
let rY : S.felem = 0x7e88e5e6a142d5c2269f21a158e82ab2c79fcecb26e397b96fd5b9fbcd0a69a5 in
[@inline_let]
let rZ : S.felem = 0x02626dc2dd5e06cd19de5e6afb6c5dbdd3e41dc1472e7b8ef11eb0662e41c44b in
(rX, rY, rZ)
val lemma_proj_g_pow2_64_eval : unit ->
Lemma (SE.exp_pow2 S.mk_p256_concrete_ops S.base_point 64 == proj_g_pow2_64)
let lemma_proj_g_pow2_64_eval () =
SPT256.exp_pow2_rec_is_exp_pow2 S.mk_p256_concrete_ops S.base_point 64;
let qX, qY, qZ = normalize_term (SPT256.exp_pow2_rec S.mk_p256_concrete_ops S.base_point 64) in
normalize_term_spec (SPT256.exp_pow2_rec S.mk_p256_concrete_ops S.base_point 64);
let rX : S.felem = 0x000931f4ae428a4ad81ee0aa89cf5247ce85d4dd696c61b4bb9d4761e57b7fbe in
let rY : S.felem = 0x7e88e5e6a142d5c2269f21a158e82ab2c79fcecb26e397b96fd5b9fbcd0a69a5 in
let rZ : S.felem = 0x02626dc2dd5e06cd19de5e6afb6c5dbdd3e41dc1472e7b8ef11eb0662e41c44b in
assert_norm (qX == rX /\ qY == rY /\ qZ == rZ)
inline_for_extraction noextract
let proj_g_pow2_128 : S.proj_point =
[@inline_let]
let rX : S.felem = 0x04c3aaf6c6c00704e96eda89461d63fd2c97ee1e6786fc785e6afac7aa92f9b1 in
[@inline_let]
let rY : S.felem = 0x14f1edaeb8e9c8d4797d164a3946c7ff50a7c8cd59139a4dbce354e6e4df09c3 in
[@inline_let]
let rZ : S.felem = 0x80119ced9a5ce83c4e31f8de1a38f89d5f9ff9f637dca86d116a4217f83e55d2 in
(rX, rY, rZ)
val lemma_proj_g_pow2_128_eval : unit ->
Lemma (SE.exp_pow2 S.mk_p256_concrete_ops proj_g_pow2_64 64 == proj_g_pow2_128)
let lemma_proj_g_pow2_128_eval () =
SPT256.exp_pow2_rec_is_exp_pow2 S.mk_p256_concrete_ops proj_g_pow2_64 64;
let qX, qY, qZ = normalize_term (SPT256.exp_pow2_rec S.mk_p256_concrete_ops proj_g_pow2_64 64) in
normalize_term_spec (SPT256.exp_pow2_rec S.mk_p256_concrete_ops proj_g_pow2_64 64);
let rX : S.felem = 0x04c3aaf6c6c00704e96eda89461d63fd2c97ee1e6786fc785e6afac7aa92f9b1 in
let rY : S.felem = 0x14f1edaeb8e9c8d4797d164a3946c7ff50a7c8cd59139a4dbce354e6e4df09c3 in
let rZ : S.felem = 0x80119ced9a5ce83c4e31f8de1a38f89d5f9ff9f637dca86d116a4217f83e55d2 in
assert_norm (qX == rX /\ qY == rY /\ qZ == rZ)
inline_for_extraction noextract
let proj_g_pow2_192 : S.proj_point =
[@inline_let]
let rX : S.felem = 0xc762a9c8ae1b2f7434ff8da70fe105e0d4f188594989f193de0dbdbf5f60cb9a in
[@inline_let]
let rY : S.felem = 0x1eddaf51836859e1369f1ae8d9ab02e4123b6f151d9b796e297a38fa5613d9bc in
[@inline_let]
let rZ : S.felem = 0xcb433ab3f67815707e398dc7910cc4ec6ea115360060fc73c35b53dce02e2c72 in
(rX, rY, rZ)
val lemma_proj_g_pow2_192_eval : unit ->
Lemma (SE.exp_pow2 S.mk_p256_concrete_ops proj_g_pow2_128 64 == proj_g_pow2_192)
let lemma_proj_g_pow2_192_eval () =
SPT256.exp_pow2_rec_is_exp_pow2 S.mk_p256_concrete_ops proj_g_pow2_128 64;
let qX, qY, qZ = normalize_term (SPT256.exp_pow2_rec S.mk_p256_concrete_ops proj_g_pow2_128 64) in
normalize_term_spec (SPT256.exp_pow2_rec S.mk_p256_concrete_ops proj_g_pow2_128 64);
let rX : S.felem = 0xc762a9c8ae1b2f7434ff8da70fe105e0d4f188594989f193de0dbdbf5f60cb9a in
let rY : S.felem = 0x1eddaf51836859e1369f1ae8d9ab02e4123b6f151d9b796e297a38fa5613d9bc in
let rZ : S.felem = 0xcb433ab3f67815707e398dc7910cc4ec6ea115360060fc73c35b53dce02e2c72 in
assert_norm (qX == rX /\ qY == rY /\ qZ == rZ)
// let proj_g_pow2_64 : S.proj_point =
// normalize_term (SPT256.exp_pow2_rec S.mk_p256_concrete_ops S.base_point 64)
// let proj_g_pow2_128 : S.proj_point =
// normalize_term (SPT256.exp_pow2_rec S.mk_p256_concrete_ops proj_g_pow2_64 64)
// let proj_g_pow2_192 : S.proj_point =
// normalize_term (SPT256.exp_pow2_rec S.mk_p256_concrete_ops proj_g_pow2_128 64)
inline_for_extraction noextract
let proj_g_pow2_64_list : SPTK.point_list =
normalize_term (SPTK.proj_point_to_list proj_g_pow2_64)
inline_for_extraction noextract
let proj_g_pow2_128_list : SPTK.point_list =
normalize_term (SPTK.proj_point_to_list proj_g_pow2_128)
inline_for_extraction noextract
let proj_g_pow2_192_list : SPTK.point_list =
normalize_term (SPTK.proj_point_to_list proj_g_pow2_192)
let proj_g_pow2_64_lseq : LSeq.lseq uint64 12 =
normalize_term_spec (SPTK.proj_point_to_list proj_g_pow2_64);
Seq.seq_of_list proj_g_pow2_64_list
let proj_g_pow2_128_lseq : LSeq.lseq uint64 12 =
normalize_term_spec (SPTK.proj_point_to_list proj_g_pow2_128);
Seq.seq_of_list proj_g_pow2_128_list
let proj_g_pow2_192_lseq : LSeq.lseq uint64 12 =
normalize_term_spec (SPTK.proj_point_to_list proj_g_pow2_192);
Seq.seq_of_list proj_g_pow2_192_list
val proj_g_pow2_64_lemma: unit ->
Lemma (S.to_aff_point proj_g_pow2_64 == pow_point (pow2 64) g_aff)
let proj_g_pow2_64_lemma () =
lemma_proj_g_pow2_64_eval ();
SPT256.a_pow2_64_lemma S.mk_p256_concrete_ops S.base_point
val proj_g_pow2_128_lemma: unit ->
Lemma (S.to_aff_point proj_g_pow2_128 == pow_point (pow2 128) g_aff)
let proj_g_pow2_128_lemma () =
lemma_proj_g_pow2_128_eval ();
lemma_proj_g_pow2_64_eval ();
SPT256.a_pow2_128_lemma S.mk_p256_concrete_ops S.base_point
val proj_g_pow2_192_lemma: unit ->
Lemma (S.to_aff_point proj_g_pow2_192 == pow_point (pow2 192) g_aff)
let proj_g_pow2_192_lemma () =
lemma_proj_g_pow2_192_eval ();
lemma_proj_g_pow2_128_eval ();
lemma_proj_g_pow2_64_eval ();
SPT256.a_pow2_192_lemma S.mk_p256_concrete_ops S.base_point
let proj_g_pow2_64_lseq_lemma () =
normalize_term_spec (SPTK.proj_point_to_list proj_g_pow2_64);
proj_g_pow2_64_lemma ();
SPTK.proj_point_to_list_lemma proj_g_pow2_64
let proj_g_pow2_128_lseq_lemma () =
normalize_term_spec (SPTK.proj_point_to_list proj_g_pow2_128);
proj_g_pow2_128_lemma ();
SPTK.proj_point_to_list_lemma proj_g_pow2_128
let proj_g_pow2_192_lseq_lemma () =
normalize_term_spec (SPTK.proj_point_to_list proj_g_pow2_192);
proj_g_pow2_192_lemma ();
SPTK.proj_point_to_list_lemma proj_g_pow2_192
let mk_proj_g_pow2_64 () =
createL proj_g_pow2_64_list
let mk_proj_g_pow2_128 () =
createL proj_g_pow2_128_list
let mk_proj_g_pow2_192 () =
createL proj_g_pow2_192_list
//----------------
/// window size = 4; precomputed table = [[0]G, [1]G, ..., [15]G]
inline_for_extraction noextract
let precomp_basepoint_table_list_w4: x:list uint64{FStar.List.Tot.length x = 192} =
normalize_term (SPT.precomp_base_table_list mk_p256_precomp_base_table S.base_point 15)
let precomp_basepoint_table_lseq_w4 : LSeq.lseq uint64 192 =
normalize_term_spec (SPT.precomp_base_table_list mk_p256_precomp_base_table S.base_point 15);
Seq.seq_of_list precomp_basepoint_table_list_w4
let precomp_basepoint_table_lemma_w4 () =
normalize_term_spec (SPT.precomp_base_table_list mk_p256_precomp_base_table S.base_point 15);
SPT.precomp_base_table_lemma mk_p256_precomp_base_table S.base_point 16 precomp_basepoint_table_lseq_w4
let precomp_basepoint_table_w4:
x:glbuffer uint64 192ul{witnessed x precomp_basepoint_table_lseq_w4 /\ recallable x} =
createL_global precomp_basepoint_table_list_w4
/// window size = 4; precomputed table = [[0]([pow2 64]G), [1]([pow2 64]G), ..., [15]([pow2 64]G)]
inline_for_extraction noextract
let precomp_g_pow2_64_table_list_w4: x:list uint64{FStar.List.Tot.length x = 192} =
normalize_term (SPT.precomp_base_table_list mk_p256_precomp_base_table proj_g_pow2_64 15)
let precomp_g_pow2_64_table_lseq_w4 : LSeq.lseq uint64 192 =
normalize_term_spec (SPT.precomp_base_table_list mk_p256_precomp_base_table proj_g_pow2_64 15);
Seq.seq_of_list precomp_g_pow2_64_table_list_w4
let precomp_g_pow2_64_table_lemma_w4 () =
normalize_term_spec (SPT.precomp_base_table_list mk_p256_precomp_base_table proj_g_pow2_64 15);
SPT.precomp_base_table_lemma mk_p256_precomp_base_table
proj_g_pow2_64 16 precomp_g_pow2_64_table_lseq_w4;
proj_g_pow2_64_lemma ()
let precomp_g_pow2_64_table_w4:
x:glbuffer uint64 192ul{witnessed x precomp_g_pow2_64_table_lseq_w4 /\ recallable x} =
createL_global precomp_g_pow2_64_table_list_w4
/// window size = 4; precomputed table = [[0]([pow2 128]G), [1]([pow2 128]G),...,[15]([pow2 128]G)]
inline_for_extraction noextract
let precomp_g_pow2_128_table_list_w4: x:list uint64{FStar.List.Tot.length x = 192} =
normalize_term (SPT.precomp_base_table_list mk_p256_precomp_base_table proj_g_pow2_128 15)
let precomp_g_pow2_128_table_lseq_w4 : LSeq.lseq uint64 192 =
normalize_term_spec (SPT.precomp_base_table_list mk_p256_precomp_base_table proj_g_pow2_128 15);
Seq.seq_of_list precomp_g_pow2_128_table_list_w4
let precomp_g_pow2_128_table_lemma_w4 () =
normalize_term_spec (SPT.precomp_base_table_list mk_p256_precomp_base_table proj_g_pow2_128 15);
SPT.precomp_base_table_lemma mk_p256_precomp_base_table
proj_g_pow2_128 16 precomp_g_pow2_64_table_lseq_w4;
proj_g_pow2_128_lemma () | {
"checked_file": "/",
"dependencies": [
"Spec.P256.Lemmas.fsti.checked",
"Spec.P256.fst.checked",
"Spec.Exponentiation.fsti.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.PrecompBaseTable256.fsti.checked",
"Hacl.Spec.PrecompBaseTable.fsti.checked",
"Hacl.Spec.P256.PrecompTable.fsti.checked",
"Hacl.Impl.P256.Point.fsti.checked",
"Hacl.Impl.P256.Group.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": true,
"source_file": "Hacl.P256.PrecompTable.fst"
} | [
{
"abbrev": true,
"full_module": "Spec.P256.Lemmas",
"short_module": "SL"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.P256.PrecompTable",
"short_module": "SPTK"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.PrecompBaseTable256",
"short_module": "SPT256"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.P256.Group",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.P256.Point",
"short_module": null
},
{
"abbrev": true,
"full_module": "Hacl.Spec.P256.Montgomery",
"short_module": "SM"
},
{
"abbrev": true,
"full_module": "Spec.P256",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.PrecompBaseTable",
"short_module": "SPT"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Exponentiation.Definitions",
"short_module": "BE"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation.Definition",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.P256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.P256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
Lib.Buffer.glbuffer Lib.IntTypes.uint64 192ul
{ Lib.Buffer.witnessed x Hacl.P256.PrecompTable.precomp_g_pow2_128_table_lseq_w4 /\
Lib.Buffer.recallable x } | Prims.Tot | [
"total"
] | [] | [
"Lib.Buffer.createL_global",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U64",
"Lib.IntTypes.SEC",
"Hacl.P256.PrecompTable.precomp_g_pow2_128_table_list_w4",
"Lib.Buffer.glbuffer",
"Lib.IntTypes.size",
"FStar.Pervasives.normalize_term",
"Lib.IntTypes.size_nat",
"FStar.List.Tot.Base.length",
"Lib.IntTypes.uint64",
"FStar.UInt32.__uint_to_t",
"Prims.l_and",
"Lib.Buffer.witnessed",
"Hacl.P256.PrecompTable.precomp_g_pow2_128_table_lseq_w4",
"Lib.Buffer.recallable",
"Lib.Buffer.CONST"
] | [] | false | false | false | false | false | let precomp_g_pow2_128_table_w4:x:
glbuffer uint64 192ul {witnessed x precomp_g_pow2_128_table_lseq_w4 /\ recallable x} =
| createL_global precomp_g_pow2_128_table_list_w4 | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.private_1 | val private_1:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val private_1:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 36,
"start_col": 0,
"start_line": 33
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let private_1:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy;
0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy;
0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.valid4 | val valid4 : Prims.bool | let valid4 = true | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 39,
"end_line": 135,
"start_col": 22,
"start_line": 135
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul
let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result4_len: (x:UInt32.t { UInt32.v x = B.length result4 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.bool | Prims.Tot | [
"total"
] | [] | [] | [] | false | false | false | true | false | let valid4 =
| true | false |
|
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.public1 | val public1:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val public1:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 44,
"start_col": 0,
"start_line": 41
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let public1:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy;
0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy;
0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.valid5 | val valid5 : Prims.bool | let valid5 = false | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 40,
"end_line": 161,
"start_col": 22,
"start_line": 161
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul
let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result4_len: (x:UInt32.t { UInt32.v x = B.length result4 }) =
32ul
inline_for_extraction let valid4 = true
let private_5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_5_len: (x:UInt32.t { UInt32.v x = B.length private_5 }) =
32ul
let public5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public5_len: (x:UInt32.t { UInt32.v x = B.length public5 }) =
32ul
let result5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result5_len: (x:UInt32.t { UInt32.v x = B.length result5 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.bool | Prims.Tot | [
"total"
] | [] | [] | [] | false | false | false | true | false | let valid5 =
| false | false |
|
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.valid6 | val valid6 : Prims.bool | let valid6 = false | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 40,
"end_line": 187,
"start_col": 22,
"start_line": 187
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul
let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result4_len: (x:UInt32.t { UInt32.v x = B.length result4 }) =
32ul
inline_for_extraction let valid4 = true
let private_5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_5_len: (x:UInt32.t { UInt32.v x = B.length private_5 }) =
32ul
let public5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public5_len: (x:UInt32.t { UInt32.v x = B.length public5 }) =
32ul
let result5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result5_len: (x:UInt32.t { UInt32.v x = B.length result5 }) =
32ul
inline_for_extraction let valid5 = false
let private_6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x02uy; 0x04uy; 0x06uy; 0x08uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_6_len: (x:UInt32.t { UInt32.v x = B.length private_6 }) =
32ul
let public6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe0uy; 0xebuy; 0x7auy; 0x7cuy; 0x3buy; 0x41uy; 0xb8uy; 0xaeuy; 0x16uy; 0x56uy; 0xe3uy; 0xfauy; 0xf1uy; 0x9fuy; 0xc4uy; 0x6auy; 0xdauy; 0x09uy; 0x8duy; 0xebuy; 0x9cuy; 0x32uy; 0xb1uy; 0xfduy; 0x86uy; 0x62uy; 0x05uy; 0x16uy; 0x5fuy; 0x49uy; 0xb8uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public6_len: (x:UInt32.t { UInt32.v x = B.length public6 }) =
32ul
let result6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result6_len: (x:UInt32.t { UInt32.v x = B.length result6 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.bool | Prims.Tot | [
"total"
] | [] | [] | [] | false | false | false | true | false | let valid6 =
| false | false |
|
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.result0_len | val result0_len:(x: UInt32.t{UInt32.v x = B.length result0}) | val result0_len:(x: UInt32.t{UInt32.v x = B.length result0}) | let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 29,
"start_col": 22,
"start_line": 28
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.result0} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let result0_len:(x: UInt32.t{UInt32.v x = B.length result0}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.private_2 | val private_2:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val private_2:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 62,
"start_col": 0,
"start_line": 59
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let private_2:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy;
0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy;
0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.result0 | val result0:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val result0:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 26,
"start_col": 0,
"start_line": 23
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let result0:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy;
0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy;
0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.public3_len | val public3_len:(x: UInt32.t{UInt32.v x = B.length public3}) | val public3_len:(x: UInt32.t{UInt32.v x = B.length public3}) | let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 99,
"start_col": 22,
"start_line": 98
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.public3} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let public3_len:(x: UInt32.t{UInt32.v x = B.length public3}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.result2_len | val result2_len:(x: UInt32.t{UInt32.v x = B.length result2}) | val result2_len:(x: UInt32.t{UInt32.v x = B.length result2}) | let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 81,
"start_col": 22,
"start_line": 80
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.result2} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let result2_len:(x: UInt32.t{UInt32.v x = B.length result2}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.result1_len | val result1_len:(x: UInt32.t{UInt32.v x = B.length result1}) | val result1_len:(x: UInt32.t{UInt32.v x = B.length result1}) | let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 55,
"start_col": 22,
"start_line": 54
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.result1} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let result1_len:(x: UInt32.t{UInt32.v x = B.length result1}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.result1 | val result1:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val result1:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 52,
"start_col": 0,
"start_line": 49
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let result1:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy;
0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy;
0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.public0_len | val public0_len:(x: UInt32.t{UInt32.v x = B.length public0}) | val public0_len:(x: UInt32.t{UInt32.v x = B.length public0}) | let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 21,
"start_col": 22,
"start_line": 20
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.public0} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let public0_len:(x: UInt32.t{UInt32.v x = B.length public0}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.private_3_len | val private_3_len:(x: UInt32.t{UInt32.v x = B.length private_3}) | val private_3_len:(x: UInt32.t{UInt32.v x = B.length private_3}) | let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 91,
"start_col": 22,
"start_line": 90
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.private_3} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let private_3_len:(x: UInt32.t{UInt32.v x = B.length private_3}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.public1_len | val public1_len:(x: UInt32.t{UInt32.v x = B.length public1}) | val public1_len:(x: UInt32.t{UInt32.v x = B.length public1}) | let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 47,
"start_col": 22,
"start_line": 46
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.public1} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let public1_len:(x: UInt32.t{UInt32.v x = B.length public1}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.public2_len | val public2_len:(x: UInt32.t{UInt32.v x = B.length public2}) | val public2_len:(x: UInt32.t{UInt32.v x = B.length public2}) | let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 73,
"start_col": 22,
"start_line": 72
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.public2} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let public2_len:(x: UInt32.t{UInt32.v x = B.length public2}) =
| 32ul | false |
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.serialize_nondep_then_upd_bw_left | val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
)) | val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
)) | let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
= serialize_nondep_then_upd_left s1 s2 x y | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 42,
"end_line": 539,
"start_col": 0,
"start_line": 520
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ())
let serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
= ()
let length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
= ()
let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
= serialize_nondep_then_upd_left s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s1' = serialize s1 (fst x) in
let l1 = Seq.length s1' in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) s1' (serialize s2 (snd x));
seq_upd_seq_right_to_left s 0 s1' i' s';
seq_upd_seq_slice_idem s 0 (Seq.length s1') | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s1:
LowParse.Spec.Base.serializer p1
{ Mkparser_kind'?.parser_kind_subkind k1 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } ->
s2: LowParse.Spec.Base.serializer p2 ->
x: (t1 * t2) ->
y: t1
-> FStar.Pervasives.Lemma
(requires
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 y) ==
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 (FStar.Pervasives.Native.fst x)))
(ensures
(let s =
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2) x
in
let len2 =
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s2 (FStar.Pervasives.Native.snd x))
in
len2 + FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 y) <=
FStar.Seq.Base.length s /\
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2)
(y,
FStar.Pervasives.Native.snd x) ==
LowParse.Spec.Base.seq_upd_bw_seq s len2 (LowParse.Spec.Base.serialize s1 y))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Combinators.serialize_nondep_then_upd_left",
"Prims.unit",
"Prims.nat",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"LowParse.Spec.Base.serialize",
"FStar.Pervasives.Native.fst",
"Prims.squash",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Seq.Base.seq",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.nondep_then",
"LowParse.Spec.Combinators.serialize_nondep_then",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Pervasives.Native.snd",
"LowParse.Spec.Base.seq_upd_bw_seq",
"LowParse.Bytes.bytes",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma (requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures
(let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) ==
seq_upd_bw_seq s len2 (serialize s1 y))) =
| serialize_nondep_then_upd_left s1 s2 x y | false |
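The conclusion of serialize_nondep_then_upd_bw_left can be read as a single equation; this is a paraphrase of the val above, where len1 and len2 are shorthand (introduced here, not in the record) for Seq.length (serialize s1 (fst x)) and Seq.length (serialize s2 (snd x)):
\[ \mathtt{serialize}\;(\mathtt{serialize\_nondep\_then}\;s_1\;s_2)\;(y,\ \mathtt{snd}\;x) \;=\; \mathtt{seq\_upd\_bw\_seq}\;\big(\mathtt{serialize}\;(\mathtt{serialize\_nondep\_then}\;s_1\;s_2)\;x\big)\;\mathit{len}_2\;\big(\mathtt{serialize}\;s_1\;y\big) \]
whenever Seq.length (serialize s1 y) = len1. Because length_serialize_nondep_then, earlier in the same file context, gives the serialized pair total length len1 + len2, a backward update at offset len2 with a patch of length len1 lands at forward offset 0 (reading seq_upd_bw_seq as the right-anchored counterpart of seq_upd_seq), which is why the definition above is simply a call to serialize_nondep_then_upd_left.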
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.public2 | val public2:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val public2:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 70,
"start_col": 0,
"start_line": 67
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let public2:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy;
0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy;
0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
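One observation about the shape of these Curve25519 vectors, under the assumption (not stated in the records themselves) that each 32-byte buffer follows the usual X25519 little-endian encoding of a field element or scalar: the public2 list above, the byte 0x25 followed by 31 zero bytes, then denotes the u-coordinate
\[ u \;=\; \sum_{i=0}^{31} b_i \cdot 2^{8 i} \;=\; \mathtt{0x25} \;=\; 37. \]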
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.private_2_len | val private_2_len:(x: UInt32.t{UInt32.v x = B.length private_2}) | val private_2_len:(x: UInt32.t{UInt32.v x = B.length private_2}) | let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 65,
"start_col": 22,
"start_line": 64
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.private_2} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let private_2_len:(x: UInt32.t{UInt32.v x = B.length private_2}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.result2 | val result2:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val result2:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 78,
"start_col": 0,
"start_line": 75
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let result2:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy;
0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy;
0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.private_4_len | val private_4_len:(x: UInt32.t{UInt32.v x = B.length private_4}) | val private_4_len:(x: UInt32.t{UInt32.v x = B.length private_4}) | let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 117,
"start_col": 22,
"start_line": 116
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.private_4} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let private_4_len:(x: UInt32.t{UInt32.v x = B.length private_4}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.private_3 | val private_3:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val private_3:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 88,
"start_col": 0,
"start_line": 85
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let private_3:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy;
0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy;
0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.public3 | val public3:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val public3:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 96,
"start_col": 0,
"start_line": 93
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let public3:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy;
0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy;
0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.result3_len | val result3_len:(x: UInt32.t{UInt32.v x = B.length result3}) | val result3_len:(x: UInt32.t{UInt32.v x = B.length result3}) | let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 107,
"start_col": 22,
"start_line": 106
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.result3} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let result3_len:(x: UInt32.t{UInt32.v x = B.length result3}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.result5_len | val result5_len:(x: UInt32.t{UInt32.v x = B.length result5}) | val result5_len:(x: UInt32.t{UInt32.v x = B.length result5}) | let result5_len: (x:UInt32.t { UInt32.v x = B.length result5 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 159,
"start_col": 22,
"start_line": 158
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul
let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result4_len: (x:UInt32.t { UInt32.v x = B.length result4 }) =
32ul
inline_for_extraction let valid4 = true
let private_5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_5_len: (x:UInt32.t { UInt32.v x = B.length private_5 }) =
32ul
let public5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public5_len: (x:UInt32.t { UInt32.v x = B.length public5 }) =
32ul
let result5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.result5} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let result5_len:(x: UInt32.t{UInt32.v x = B.length result5}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.result4 | val result4:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val result4:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 130,
"start_col": 0,
"start_line": 127
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let result4:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy;
0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy;
0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.result3 | val result3:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val result3:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 104,
"start_col": 0,
"start_line": 101
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let result3:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy;
0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy;
0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
LowParse.Spec.Combinators.fst | LowParse.Spec.Combinators.serialize_nondep_then_upd_bw_left_chain | val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
)) | val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
)) | let serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
= let j' = Seq.length (serialize s1 (fst x)) - i' - Seq.length s' in
serialize_nondep_then_upd_left_chain s1 s2 x y j' s';
assert (j' == Seq.length (serialize (serialize_nondep_then s1 s2) x) - (Seq.length (serialize s2 (snd x)) + i') - Seq.length s') | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 130,
"end_line": 570,
"start_col": 0,
"start_line": 543
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ())
let serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
= ()
let length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
= ()
let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
= serialize_nondep_then_upd_left s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s1' = serialize s1 (fst x) in
let l1 = Seq.length s1' in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) s1' (serialize s2 (snd x));
seq_upd_seq_right_to_left s 0 s1' i' s';
seq_upd_seq_slice_idem s 0 (Seq.length s1')
let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
= serialize_nondep_then_upd_left s1 s2 x y
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'" | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [
"smt.arith.nl=false"
],
"z3refresh": true,
"z3rlimit": 64,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s1:
LowParse.Spec.Base.serializer p1
{ Mkparser_kind'?.parser_kind_subkind k1 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } ->
s2: LowParse.Spec.Base.serializer p2 ->
x: (t1 * t2) ->
y: t1 ->
i': Prims.nat ->
s': LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(requires
(let s1' = LowParse.Spec.Base.serialize s1 (FStar.Pervasives.Native.fst x) in
i' + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s1' /\
LowParse.Spec.Base.serialize s1 y == LowParse.Spec.Base.seq_upd_bw_seq s1' i' s'))
(ensures
(let s =
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2) x
in
let len2 =
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s2 (FStar.Pervasives.Native.snd x))
in
len2 + i' + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s /\
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2)
(y,
FStar.Pervasives.Native.snd x) ==
LowParse.Spec.Base.seq_upd_bw_seq s (len2 + i') s')) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"FStar.Pervasives.Native.tuple2",
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims._assert",
"Prims.int",
"Prims.op_Subtraction",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"LowParse.Spec.Base.serialize",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.nondep_then",
"LowParse.Spec.Combinators.serialize_nondep_then",
"Prims.op_Addition",
"FStar.Pervasives.Native.snd",
"Prims.unit",
"LowParse.Spec.Combinators.serialize_nondep_then_upd_left_chain",
"FStar.Pervasives.Native.fst",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Seq.Base.seq",
"LowParse.Spec.Base.seq_upd_bw_seq",
"Prims.squash",
"FStar.Pervasives.Native.Mktuple2",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i': nat)
(s': bytes)
: Lemma
(requires
(let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\ serialize s1 y == seq_upd_bw_seq s1' i' s'))
(ensures
(let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s')) =
| let j' = Seq.length (serialize s1 (fst x)) - i' - Seq.length s' in
serialize_nondep_then_upd_left_chain s1 s2 x y j' s';
assert (j' ==
Seq.length (serialize (serialize_nondep_then s1 s2) x) -
(Seq.length (serialize s2 (snd x)) + i') -
Seq.length s') | false |
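The proof of serialize_nondep_then_upd_bw_left_chain above reduces the backward-offset update to the forward-offset lemma serialize_nondep_then_upd_left_chain; the comment below is a brief sketch of that index arithmetic. It is not part of LowParse.Spec.Combinators.fst, and it assumes the usual reading of seq_upd_bw_seq s i s' as an update at forward position Seq.length s - i - Seq.length s'.

(* Informal sketch only, not from the original source.
   Write len1 = Seq.length (serialize s1 (fst x)),
         len2 = Seq.length (serialize s2 (snd x)),
         len' = Seq.length s',
   and s = serialize (serialize_nondep_then s1 s2) x, so Seq.length s = len1 + len2.
   The hypothesis serialize s1 y == seq_upd_bw_seq s1' i' s' is, under the assumed
   reading of seq_upd_bw_seq, a forward update of s1' at j' = len1 - i' - len'.
   Applying serialize_nondep_then_upd_left_chain at j' gives a forward update of s
   at j', and since
     Seq.length s - (len2 + i') - len' = (len1 + len2) - (len2 + i') - len' = j',
   that forward update at j' is exactly the backward update of s at offset len2 + i',
   which is what the final assert records before the SMT solver closes the goal. *)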
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.public5 | val public5:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val public5:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let public5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 148,
"start_col": 0,
"start_line": 145
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul
let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result4_len: (x:UInt32.t { UInt32.v x = B.length result4 }) =
32ul
inline_for_extraction let valid4 = true
let private_5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_5_len: (x:UInt32.t { UInt32.v x = B.length private_5 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let public5:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy;
0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy;
0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.public4 | val public4:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val public4:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 122,
"start_col": 0,
"start_line": 119
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let public4:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy;
0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy;
0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.private_4 | val private_4:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val private_4:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 114,
"start_col": 0,
"start_line": 111
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let private_4:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy;
0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy;
0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.public6_len | val public6_len:(x: UInt32.t{UInt32.v x = B.length public6}) | val public6_len:(x: UInt32.t{UInt32.v x = B.length public6}) | let public6_len: (x:UInt32.t { UInt32.v x = B.length public6 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 177,
"start_col": 22,
"start_line": 176
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul
let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result4_len: (x:UInt32.t { UInt32.v x = B.length result4 }) =
32ul
inline_for_extraction let valid4 = true
let private_5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_5_len: (x:UInt32.t { UInt32.v x = B.length private_5 }) =
32ul
let public5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public5_len: (x:UInt32.t { UInt32.v x = B.length public5 }) =
32ul
let result5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result5_len: (x:UInt32.t { UInt32.v x = B.length result5 }) =
32ul
inline_for_extraction let valid5 = false
let private_6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x02uy; 0x04uy; 0x06uy; 0x08uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_6_len: (x:UInt32.t { UInt32.v x = B.length private_6 }) =
32ul
let public6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe0uy; 0xebuy; 0x7auy; 0x7cuy; 0x3buy; 0x41uy; 0xb8uy; 0xaeuy; 0x16uy; 0x56uy; 0xe3uy; 0xfauy; 0xf1uy; 0x9fuy; 0xc4uy; 0x6auy; 0xdauy; 0x09uy; 0x8duy; 0xebuy; 0x9cuy; 0x32uy; 0xb1uy; 0xfduy; 0x86uy; 0x62uy; 0x05uy; 0x16uy; 0x5fuy; 0x49uy; 0xb8uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.public6} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let public6_len:(x: UInt32.t{UInt32.v x = B.length public6}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.public4_len | val public4_len:(x: UInt32.t{UInt32.v x = B.length public4}) | val public4_len:(x: UInt32.t{UInt32.v x = B.length public4}) | let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 125,
"start_col": 22,
"start_line": 124
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.public4} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let public4_len:(x: UInt32.t{UInt32.v x = B.length public4}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.public5_len | val public5_len:(x: UInt32.t{UInt32.v x = B.length public5}) | val public5_len:(x: UInt32.t{UInt32.v x = B.length public5}) | let public5_len: (x:UInt32.t { UInt32.v x = B.length public5 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 151,
"start_col": 22,
"start_line": 150
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul
let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result4_len: (x:UInt32.t { UInt32.v x = B.length result4 }) =
32ul
inline_for_extraction let valid4 = true
let private_5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_5_len: (x:UInt32.t { UInt32.v x = B.length private_5 }) =
32ul
let public5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.public5} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let public5_len:(x: UInt32.t{UInt32.v x = B.length public5}) =
| 32ul | false |
Lib.Meta.fst | Lib.Meta.is_hex_digit | val is_hex_digit: Char.char -> bool | val is_hex_digit: Char.char -> bool | let is_hex_digit = function
| '0'
| '1'
| '2'
| '3'
| '4'
| '5'
| '6'
| '7'
| '8'
| '9'
| 'a' | 'A'
| 'b' | 'B'
| 'c' | 'C'
| 'd' | 'D'
| 'e' | 'E'
| 'f' | 'F' -> true
| _ -> false | {
"file_name": "lib/Lib.Meta.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 28,
"start_col": 0,
"start_line": 11
} | module Lib.Meta
open Lib.IntTypes
/// Helpers used in tests and tactics (see e.g. Test.LowStarize)
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 50" | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.IntTypes.fsti.checked",
"FStar.String.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.Char.fsti.checked"
],
"interface_file": false,
"source_file": "Lib.Meta.fst"
} | [
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | _: FStar.Char.char -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"FStar.Char.char",
"Prims.bool"
] | [] | false | false | false | true | false | let is_hex_digit =
| function
| '0'
| '1'
| '2'
| '3'
| '4'
| '5'
| '6'
| '7'
| '8'
| '9'
| 'a'
| 'A'
| 'b'
| 'B'
| 'c'
| 'C'
| 'd'
| 'D'
| 'e'
| 'E'
| 'f'
| 'F' -> true
| _ -> false | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.result4_len | val result4_len:(x: UInt32.t{UInt32.v x = B.length result4}) | val result4_len:(x: UInt32.t{UInt32.v x = B.length result4}) | let result4_len: (x:UInt32.t { UInt32.v x = B.length result4 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 133,
"start_col": 22,
"start_line": 132
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul
let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.result4} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let result4_len:(x: UInt32.t{UInt32.v x = B.length result4}) =
| 32ul | false |
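The record above illustrates a pattern that recurs throughout these vector files: each test buffer is allocated once with `B.gcmalloc_of_list` in the root region as a recallable buffer, and its length is then exposed as a `UInt32.t` constant whose refinement ties it to `B.length` of that buffer, so a caller can never pair the pointer with the wrong length. Below is a minimal sketch of the same pattern on a hypothetical 4-byte buffer; `Sketch.LengthPattern`, `buf`, and `buf_len` are illustrative names that do not appear in the vector files, and the sketch assumes the same implicit `FStar` namespace resolution as the surrounding code.

module Sketch.LengthPattern

module B = LowStar.Buffer

let buf: (b: B.buffer UInt8.t { B.length b = 4 /\ B.recallable b }) =
  (* the length refinement on the result follows from the literal list below *)
  [@inline_let] let l = [ 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; ] in
  assert_norm (List.Tot.length l = 4);
  B.gcmalloc_of_list HyperStack.root l

(* the refinement forces this constant to agree with B.length buf *)
inline_for_extraction let buf_len: (x:UInt32.t { UInt32.v x = B.length buf }) =
  4ul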
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.private_6_len | val private_6_len:(x: UInt32.t{UInt32.v x = B.length private_6}) | val private_6_len:(x: UInt32.t{UInt32.v x = B.length private_6}) | let private_6_len: (x:UInt32.t { UInt32.v x = B.length private_6 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 169,
"start_col": 22,
"start_line": 168
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul
let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result4_len: (x:UInt32.t { UInt32.v x = B.length result4 }) =
32ul
inline_for_extraction let valid4 = true
let private_5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_5_len: (x:UInt32.t { UInt32.v x = B.length private_5 }) =
32ul
let public5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public5_len: (x:UInt32.t { UInt32.v x = B.length public5 }) =
32ul
let result5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result5_len: (x:UInt32.t { UInt32.v x = B.length result5 }) =
32ul
inline_for_extraction let valid5 = false
let private_6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x02uy; 0x04uy; 0x06uy; 0x08uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.private_6} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let private_6_len:(x: UInt32.t{UInt32.v x = B.length private_6}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.private_5_len | val private_5_len:(x: UInt32.t{UInt32.v x = B.length private_5}) | val private_5_len:(x: UInt32.t{UInt32.v x = B.length private_5}) | let private_5_len: (x:UInt32.t { UInt32.v x = B.length private_5 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 143,
"start_col": 22,
"start_line": 142
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul
let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result4_len: (x:UInt32.t { UInt32.v x = B.length result4 }) =
32ul
inline_for_extraction let valid4 = true
let private_5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.private_5} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let private_5_len:(x: UInt32.t{UInt32.v x = B.length private_5}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.result6_len | val result6_len:(x: UInt32.t{UInt32.v x = B.length result6}) | val result6_len:(x: UInt32.t{UInt32.v x = B.length result6}) | let result6_len: (x:UInt32.t { UInt32.v x = B.length result6 }) =
32ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 6,
"end_line": 185,
"start_col": 22,
"start_line": 184
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul
let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result4_len: (x:UInt32.t { UInt32.v x = B.length result4 }) =
32ul
inline_for_extraction let valid4 = true
let private_5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_5_len: (x:UInt32.t { UInt32.v x = B.length private_5 }) =
32ul
let public5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public5_len: (x:UInt32.t { UInt32.v x = B.length public5 }) =
32ul
let result5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result5_len: (x:UInt32.t { UInt32.v x = B.length result5 }) =
32ul
inline_for_extraction let valid5 = false
let private_6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x02uy; 0x04uy; 0x06uy; 0x08uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_6_len: (x:UInt32.t { UInt32.v x = B.length private_6 }) =
32ul
let public6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe0uy; 0xebuy; 0x7auy; 0x7cuy; 0x3buy; 0x41uy; 0xb8uy; 0xaeuy; 0x16uy; 0x56uy; 0xe3uy; 0xfauy; 0xf1uy; 0x9fuy; 0xc4uy; 0x6auy; 0xdauy; 0x09uy; 0x8duy; 0xebuy; 0x9cuy; 0x32uy; 0xb1uy; 0xfduy; 0x86uy; 0x62uy; 0x05uy; 0x16uy; 0x5fuy; 0x49uy; 0xb8uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public6_len: (x:UInt32.t { UInt32.v x = B.length public6 }) =
32ul
let result6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.result6} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let result6_len:(x: UInt32.t{UInt32.v x = B.length result6}) =
| 32ul | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.vectors_len | val vectors_len:(x: UInt32.t{UInt32.v x = B.length vectors}) | val vectors_len:(x: UInt32.t{UInt32.v x = B.length vectors}) | let vectors_len: (x:UInt32.t { UInt32.v x = B.length vectors }) =
7ul | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 5,
"end_line": 214,
"start_col": 0,
"start_line": 213
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul
let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result4_len: (x:UInt32.t { UInt32.v x = B.length result4 }) =
32ul
inline_for_extraction let valid4 = true
let private_5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_5_len: (x:UInt32.t { UInt32.v x = B.length private_5 }) =
32ul
let public5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public5_len: (x:UInt32.t { UInt32.v x = B.length public5 }) =
32ul
let result5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result5_len: (x:UInt32.t { UInt32.v x = B.length result5 }) =
32ul
inline_for_extraction let valid5 = false
let private_6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x02uy; 0x04uy; 0x06uy; 0x08uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_6_len: (x:UInt32.t { UInt32.v x = B.length private_6 }) =
32ul
let public6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe0uy; 0xebuy; 0x7auy; 0x7cuy; 0x3buy; 0x41uy; 0xb8uy; 0xaeuy; 0x16uy; 0x56uy; 0xe3uy; 0xfauy; 0xf1uy; 0x9fuy; 0xc4uy; 0x6auy; 0xdauy; 0x09uy; 0x8duy; 0xebuy; 0x9cuy; 0x32uy; 0xb1uy; 0xfduy; 0x86uy; 0x62uy; 0x05uy; 0x16uy; 0x5fuy; 0x49uy; 0xb8uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public6_len: (x:UInt32.t { UInt32.v x = B.length public6 }) =
32ul
let result6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result6_len: (x:UInt32.t { UInt32.v x = B.length result6 }) =
32ul
inline_for_extraction let valid6 = false
noeq
type vector = | Vector:
result: B.buffer UInt8.t { B.recallable result } ->
result_len: UInt32.t { B.length result = UInt32.v result_len } ->
public: B.buffer UInt8.t { B.recallable public } ->
public_len: UInt32.t { B.length public = UInt32.v public_len } ->
private_: B.buffer UInt8.t { B.recallable private_ } ->
private__len: UInt32.t { B.length private_ = UInt32.v private__len } ->
valid: bool ->
vector
let vectors: (b: B.buffer vector { B.length b = 7 /\ B.recallable b }) =
[@inline_let] let l = [
Vector result0 result0_len public0 public0_len private_0 private_0_len valid0;
Vector result1 result1_len public1 public1_len private_1 private_1_len valid1;
Vector result2 result2_len public2 public2_len private_2 private_2_len valid2;
Vector result3 result3_len public3 public3_len private_3 private_3_len valid3;
Vector result4 result4_len public4 public4_len private_4 private_4_len valid4;
Vector result5 result5_len public5 public5_len private_5 private_5_len valid5;
Vector result6 result6_len public6 public6_len private_6 private_6_len valid6;
] in
assert_norm (List.Tot.length l = 7);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Curve25519.vectors} | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | false | false | let vectors_len:(x: UInt32.t{UInt32.v x = B.length vectors}) =
| 7ul | false |
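The `vector` record in the context above bundles one Curve25519 test case — expected result, public key, and private key, each paired with its length, plus a `valid` flag — and `vectors` stores the seven cases in a recallable buffer whose length `vectors_len` mirrors. Because the constructor's fields are named, F* generates projectors such as `Vector?.result_len`. The following is a minimal sketch of a consistency check built from those projectors; `vector_lengths_are_32` is a hypothetical helper, not part of the vector file.

let vector_lengths_are_32 (v: vector) : bool =
  (* every case in this particular file carries 32-byte keys and results *)
  UInt32.v (Vector?.result_len v) = 32 &&
  UInt32.v (Vector?.public_len v) = 32 &&
  UInt32.v (Vector?.private__len v) = 32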
Lib.Meta.fst | Lib.Meta.byte_of_hex | val byte_of_hex: a:hex_digit -> b:hex_digit -> int | val byte_of_hex: a:hex_digit -> b:hex_digit -> int | let byte_of_hex a b =
FStar.Mul.(int_of_hex a * 16 + int_of_hex b) | {
"file_name": "lib/Lib.Meta.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 46,
"end_line": 56,
"start_col": 0,
"start_line": 55
} | module Lib.Meta
open Lib.IntTypes
/// Helpers used in tests and tactics (see e.g. Test.LowStarize)
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 50"
val is_hex_digit: Char.char -> bool
let is_hex_digit = function
| '0'
| '1'
| '2'
| '3'
| '4'
| '5'
| '6'
| '7'
| '8'
| '9'
| 'a' | 'A'
| 'b' | 'B'
| 'c' | 'C'
| 'd' | 'D'
| 'e' | 'E'
| 'f' | 'F' -> true
| _ -> false
type hex_digit = c:Char.char{is_hex_digit c}
val int_of_hex: c:hex_digit -> int
let int_of_hex = function
| '0' -> 0
| '1' -> 1
| '2' -> 2
| '3' -> 3
| '4' -> 4
| '5' -> 5
| '6' -> 6
| '7' -> 7
| '8' -> 8
| '9' -> 9
| 'a' | 'A' -> 10
| 'b' | 'B' -> 11
| 'c' | 'C' -> 12
| 'd' | 'D' -> 13
| 'e' | 'E' -> 14
| 'f' | 'F' -> 15 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"Lib.IntTypes.fsti.checked",
"FStar.String.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.Char.fsti.checked"
],
"interface_file": false,
"source_file": "Lib.Meta.fst"
} | [
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | a: Lib.Meta.hex_digit -> b: Lib.Meta.hex_digit -> Prims.int | Prims.Tot | [
"total"
] | [] | [
"Lib.Meta.hex_digit",
"Prims.op_Addition",
"FStar.Mul.op_Star",
"Lib.Meta.int_of_hex",
"Prims.int"
] | [] | false | false | false | true | false | let byte_of_hex a b =
| let open FStar.Mul in int_of_hex a * 16 + int_of_hex b | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.private_5 | val private_5:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val private_5:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let private_5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 140,
"start_col": 0,
"start_line": 137
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul
let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result4_len: (x:UInt32.t { UInt32.v x = B.length result4 }) =
32ul
inline_for_extraction let valid4 = true | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let private_5:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy;
0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy;
0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |
Test.Vectors.Curve25519.fst | Test.Vectors.Curve25519.public6 | val public6:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | val public6:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) | let public6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe0uy; 0xebuy; 0x7auy; 0x7cuy; 0x3buy; 0x41uy; 0xb8uy; 0xaeuy; 0x16uy; 0x56uy; 0xe3uy; 0xfauy; 0xf1uy; 0x9fuy; 0xc4uy; 0x6auy; 0xdauy; 0x09uy; 0x8duy; 0xebuy; 0x9cuy; 0x32uy; 0xb1uy; 0xfduy; 0x86uy; 0x62uy; 0x05uy; 0x16uy; 0x5fuy; 0x49uy; 0xb8uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Curve25519.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 174,
"start_col": 0,
"start_line": 171
} | module Test.Vectors.Curve25519
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let private_0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x77uy; 0x07uy; 0x6duy; 0x0auy; 0x73uy; 0x18uy; 0xa5uy; 0x7duy; 0x3cuy; 0x16uy; 0xc1uy; 0x72uy; 0x51uy; 0xb2uy; 0x66uy; 0x45uy; 0xdfuy; 0x4cuy; 0x2fuy; 0x87uy; 0xebuy; 0xc0uy; 0x99uy; 0x2auy; 0xb1uy; 0x77uy; 0xfbuy; 0xa5uy; 0x1duy; 0xb9uy; 0x2cuy; 0x2auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_0_len: (x:UInt32.t { UInt32.v x = B.length private_0 }) =
32ul
let public0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdeuy; 0x9euy; 0xdbuy; 0x7duy; 0x7buy; 0x7duy; 0xc1uy; 0xb4uy; 0xd3uy; 0x5buy; 0x61uy; 0xc2uy; 0xecuy; 0xe4uy; 0x35uy; 0x37uy; 0x3fuy; 0x83uy; 0x43uy; 0xc8uy; 0x5buy; 0x78uy; 0x67uy; 0x4duy; 0xaduy; 0xfcuy; 0x7euy; 0x14uy; 0x6fuy; 0x88uy; 0x2buy; 0x4fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public0_len: (x:UInt32.t { UInt32.v x = B.length public0 }) =
32ul
let result0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result0_len: (x:UInt32.t { UInt32.v x = B.length result0 }) =
32ul
inline_for_extraction let valid0 = true
let private_1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5duy; 0xabuy; 0x08uy; 0x7euy; 0x62uy; 0x4auy; 0x8auy; 0x4buy; 0x79uy; 0xe1uy; 0x7fuy; 0x8buy; 0x83uy; 0x80uy; 0x0euy; 0xe6uy; 0x6fuy; 0x3buy; 0xb1uy; 0x29uy; 0x26uy; 0x18uy; 0xb6uy; 0xfduy; 0x1cuy; 0x2fuy; 0x8buy; 0x27uy; 0xffuy; 0x88uy; 0xe0uy; 0xebuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_1_len: (x:UInt32.t { UInt32.v x = B.length private_1 }) =
32ul
let public1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x20uy; 0xf0uy; 0x09uy; 0x89uy; 0x30uy; 0xa7uy; 0x54uy; 0x74uy; 0x8buy; 0x7duy; 0xdcuy; 0xb4uy; 0x3euy; 0xf7uy; 0x5auy; 0x0duy; 0xbfuy; 0x3auy; 0x0duy; 0x26uy; 0x38uy; 0x1auy; 0xf4uy; 0xebuy; 0xa4uy; 0xa9uy; 0x8euy; 0xaauy; 0x9buy; 0x4euy; 0x6auy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public1_len: (x:UInt32.t { UInt32.v x = B.length public1 }) =
32ul
let result1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4auy; 0x5duy; 0x9duy; 0x5buy; 0xa4uy; 0xceuy; 0x2duy; 0xe1uy; 0x72uy; 0x8euy; 0x3buy; 0xf4uy; 0x80uy; 0x35uy; 0x0fuy; 0x25uy; 0xe0uy; 0x7euy; 0x21uy; 0xc9uy; 0x47uy; 0xd1uy; 0x9euy; 0x33uy; 0x76uy; 0xf0uy; 0x9buy; 0x3cuy; 0x1euy; 0x16uy; 0x17uy; 0x42uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result1_len: (x:UInt32.t { UInt32.v x = B.length result1 }) =
32ul
inline_for_extraction let valid1 = true
let private_2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_2_len: (x:UInt32.t { UInt32.v x = B.length private_2 }) =
32ul
let public2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x25uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public2_len: (x:UInt32.t { UInt32.v x = B.length public2 }) =
32ul
let result2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x3cuy; 0x77uy; 0x77uy; 0xcauy; 0xf9uy; 0x97uy; 0xb2uy; 0x64uy; 0x41uy; 0x60uy; 0x77uy; 0x66uy; 0x5buy; 0x4euy; 0x22uy; 0x9duy; 0x0buy; 0x95uy; 0x48uy; 0xdcuy; 0x0cuy; 0xd8uy; 0x19uy; 0x98uy; 0xdduy; 0xcduy; 0xc5uy; 0xc8uy; 0x53uy; 0x3cuy; 0x79uy; 0x7fuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result2_len: (x:UInt32.t { UInt32.v x = B.length result2 }) =
32ul
inline_for_extraction let valid2 = true
let private_3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_3_len: (x:UInt32.t { UInt32.v x = B.length private_3 }) =
32ul
let public3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; 0xffuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public3_len: (x:UInt32.t { UInt32.v x = B.length public3 }) =
32ul
let result3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb3uy; 0x2duy; 0x13uy; 0x62uy; 0xc2uy; 0x48uy; 0xd6uy; 0x2fuy; 0xe6uy; 0x26uy; 0x19uy; 0xcfuy; 0xf0uy; 0x4duy; 0xd4uy; 0x3duy; 0xb7uy; 0x3fuy; 0xfcuy; 0x1buy; 0x63uy; 0x08uy; 0xeduy; 0xe3uy; 0x0buy; 0x78uy; 0xd8uy; 0x73uy; 0x80uy; 0xf1uy; 0xe8uy; 0x34uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result3_len: (x:UInt32.t { UInt32.v x = B.length result3 }) =
32ul
inline_for_extraction let valid3 = true
let private_4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xa5uy; 0x46uy; 0xe3uy; 0x6buy; 0xf0uy; 0x52uy; 0x7cuy; 0x9duy; 0x3buy; 0x16uy; 0x15uy; 0x4buy; 0x82uy; 0x46uy; 0x5euy; 0xdduy; 0x62uy; 0x14uy; 0x4cuy; 0x0auy; 0xc1uy; 0xfcuy; 0x5auy; 0x18uy; 0x50uy; 0x6auy; 0x22uy; 0x44uy; 0xbauy; 0x44uy; 0x9auy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_4_len: (x:UInt32.t { UInt32.v x = B.length private_4 }) =
32ul
let public4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xe6uy; 0xdbuy; 0x68uy; 0x67uy; 0x58uy; 0x30uy; 0x30uy; 0xdbuy; 0x35uy; 0x94uy; 0xc1uy; 0xa4uy; 0x24uy; 0xb1uy; 0x5fuy; 0x7cuy; 0x72uy; 0x66uy; 0x24uy; 0xecuy; 0x26uy; 0xb3uy; 0x35uy; 0x3buy; 0x10uy; 0xa9uy; 0x03uy; 0xa6uy; 0xd0uy; 0xabuy; 0x1cuy; 0x4cuy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public4_len: (x:UInt32.t { UInt32.v x = B.length public4 }) =
32ul
let result4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xc3uy; 0xdauy; 0x55uy; 0x37uy; 0x9duy; 0xe9uy; 0xc6uy; 0x90uy; 0x8euy; 0x94uy; 0xeauy; 0x4duy; 0xf2uy; 0x8duy; 0x08uy; 0x4fuy; 0x32uy; 0xecuy; 0xcfuy; 0x03uy; 0x49uy; 0x1cuy; 0x71uy; 0xf7uy; 0x54uy; 0xb4uy; 0x07uy; 0x55uy; 0x77uy; 0xa2uy; 0x85uy; 0x52uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result4_len: (x:UInt32.t { UInt32.v x = B.length result4 }) =
32ul
inline_for_extraction let valid4 = true
let private_5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_5_len: (x:UInt32.t { UInt32.v x = B.length private_5 }) =
32ul
let public5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let public5_len: (x:UInt32.t { UInt32.v x = B.length public5 }) =
32ul
let result5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let result5_len: (x:UInt32.t { UInt32.v x = B.length result5 }) =
32ul
inline_for_extraction let valid5 = false
let private_6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x02uy; 0x04uy; 0x06uy; 0x08uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let private_6_len: (x:UInt32.t { UInt32.v x = B.length private_6 }) =
32ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Curve25519.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 32 /\ LowStar.Monotonic.Buffer.recallable b} | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | false | false | false | false | false | let public6:(b: B.buffer UInt8.t {B.length b = 32 /\ B.recallable b}) =
| [@@ inline_let ]let l =
[
0xe0uy; 0xebuy; 0x7auy; 0x7cuy; 0x3buy; 0x41uy; 0xb8uy; 0xaeuy; 0x16uy; 0x56uy; 0xe3uy; 0xfauy;
0xf1uy; 0x9fuy; 0xc4uy; 0x6auy; 0xdauy; 0x09uy; 0x8duy; 0xebuy; 0x9cuy; 0x32uy; 0xb1uy; 0xfduy;
0x86uy; 0x62uy; 0x05uy; 0x16uy; 0x5fuy; 0x49uy; 0xb8uy; 0x00uy
]
in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l | false |