Dataset schema (26 columns). The records that follow are pipe-separated in this column order; the example records shown here are all drawn from `Test.Vectors.Aes128Gcm.fst` in the hacl-star project.

| Column | Type / shape |
| --- | --- |
| effect | string (48 distinct values) |
| original_source_type | string (0–23k chars) |
| opens_and_abbrevs | list (2–92 items) |
| isa_cross_project_example | bool (1 class) |
| source_definition | string (9–57.9k chars) |
| partial_definition | string (7–23.3k chars) |
| is_div | bool (2 classes) |
| is_type | null |
| is_proof | bool (2 classes) |
| completed_definiton | string (1–250k chars) |
| dependencies | dict |
| effect_flags | sequence (0–2 items) |
| ideal_premises | sequence (0–236 items) |
| mutual_with | sequence (0–11 items) |
| file_context | string (0–407k chars) |
| interleaved | bool (1 class) |
| is_simply_typed | bool (2 classes) |
| file_name | string (5–48 chars) |
| vconfig | dict |
| is_simple_lemma | null |
| source_type | string (10–23k chars) |
| proof_features | sequence (0–1 items) |
| name | string (8–95 chars) |
| source | dict |
| verbose_type | string (1–7.42k chars) |
| source_range | dict |
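A quick way to work with this schema is to load the dataset programmatically and index into the columns above. The sketch below uses the Hugging Face `datasets` library; the repository id is a placeholder, not the dataset's actual id.

```python
# Minimal sketch: load the dataset and inspect one record.
# Assumption: "your-org/fstar-definitions" is a hypothetical id -- substitute the real one.
from datasets import load_dataset

ds = load_dataset("your-org/fstar-definitions", split="train")

print(ds.column_names)           # the 26 columns listed above
row = ds[0]
print(row["name"])               # e.g. "Test.Vectors.Aes128Gcm.key1_len"
print(row["effect"])             # e.g. "Prims.Tot"
print(row["source_type"])        # the declared `val` signature
print(row["source_definition"])  # the `let` body as written in the source file
```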
Prims.Tot | val key1_len:(x: UInt32.t{UInt32.v x = B.length key1}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul | val key1_len:(x: UInt32.t{UInt32.v x = B.length key1})
let key1_len:(x: UInt32.t{UInt32.v x = B.length key1}) = | false | null | false | 16ul | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"FStar.UInt32.__uint_to_t"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val key1_len:(x: UInt32.t{UInt32.v x = B.length key1}) | [] | Test.Vectors.Aes128Gcm.key1_len | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Aes128Gcm.key1} | {
"end_col": 6,
"end_line": 61,
"start_col": 2,
"start_line": 61
} |
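Each record carries enough text to rebuild a self-contained prefix of its module: `file_context` holds everything that precedes the definition and `source_definition` holds the definition itself. Below is a rough sketch of stitching the two together; the separator between the pieces is an assumption, and the dataset's own whitespace conventions may differ.

```python
# Sketch: rebuild the module text up to and including this definition.
# Assumption: file_context ends just before the definition; a blank line is
# inserted between the two pieces, which may not match the original layout.
def module_prefix(row: dict) -> str:
    context = row["file_context"].rstrip("\n")
    definition = row["source_definition"].strip("\n")
    return context + "\n\n" + definition + "\n"

# For the key1_len record above, the reconstructed text ends with:
#   let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
#   16ul
```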
Prims.Tot | val output2_len:(x: UInt32.t{UInt32.v x = B.length output2}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let output2_len: (x:UInt32.t { UInt32.v x = B.length output2 }) =
64ul | val output2_len:(x: UInt32.t{UInt32.v x = B.length output2})
let output2_len:(x: UInt32.t{UInt32.v x = B.length output2}) = | false | null | false | 64ul | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"FStar.UInt32.__uint_to_t"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul
let input2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; 0x1auy; 0xafuy; 0xd2uy; 0x55uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
64ul
let tag2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4duy; 0x5cuy; 0x2auy; 0xf3uy; 0x27uy; 0xcduy; 0x64uy; 0xa6uy; 0x2cuy; 0xf3uy; 0x5auy; 0xbduy; 0x2buy; 0xa6uy; 0xfauy; 0xb4uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag2_len: (x:UInt32.t { UInt32.v x = B.length tag2 }) =
16ul
let output2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b }) =
[@inline_let] let l = [ 0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy; 0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy; 0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy; 0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy; 0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy; 0x47uy; 0x3fuy; 0x59uy; 0x85uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val output2_len:(x: UInt32.t{UInt32.v x = B.length output2}) | [] | Test.Vectors.Aes128Gcm.output2_len | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Aes128Gcm.output2} | {
"end_col": 6,
"end_line": 149,
"start_col": 2,
"start_line": 149
} |
Prims.Tot | val input2_len:(x: UInt32.t{UInt32.v x = B.length input2}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
64ul | val input2_len:(x: UInt32.t{UInt32.v x = B.length input2})
let input2_len:(x: UInt32.t{UInt32.v x = B.length input2}) = | false | null | false | 64ul | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"FStar.UInt32.__uint_to_t"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul
let input2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; 0x1auy; 0xafuy; 0xd2uy; 0x55uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val input2_len:(x: UInt32.t{UInt32.v x = B.length input2}) | [] | Test.Vectors.Aes128Gcm.input2_len | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Aes128Gcm.input2} | {
"end_col": 6,
"end_line": 133,
"start_col": 2,
"start_line": 133
} |
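The boolean and enum columns (`effect`, `is_div`, `is_proof`, `is_simply_typed`) make the records easy to slice; every record shown here, for instance, is a total (`Prims.Tot`) non-proof definition. A small sketch of summarizing those flags over any iterable of row dicts, using only the standard library:

```python
from collections import Counter

def summarize(rows):
    rows = list(rows)  # allow any iterable of row dicts
    # Count how many records carry each F* effect (e.g. "Prims.Tot").
    effects = Counter(row["effect"] for row in rows)
    # Names of definitions flagged as simply typed and not proofs.
    simple = [row["name"] for row in rows
              if row["is_simply_typed"] and not row["is_proof"]]
    return effects, simple

# effects, simple = summarize(ds)   # `ds` from the loading sketch above
```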
Prims.Tot | val aad0_len:(x: UInt32.t{UInt32.v x = B.length aad0}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul | val aad0_len:(x: UInt32.t{UInt32.v x = B.length aad0})
let aad0_len:(x: UInt32.t{UInt32.v x = B.length aad0}) = | false | null | false | 0ul | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"FStar.UInt32.__uint_to_t"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val aad0_len:(x: UInt32.t{UInt32.v x = B.length aad0}) | [] | Test.Vectors.Aes128Gcm.aad0_len | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Aes128Gcm.aad0} | {
"end_col": 5,
"end_line": 29,
"start_col": 2,
"start_line": 29
} |
Prims.Tot | val output0:(b: B.buffer UInt8.t {B.length b = 0 /\ B.recallable b}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l | val output0:(b: B.buffer UInt8.t {B.length b = 0 /\ B.recallable b})
let output0:(b: B.buffer UInt8.t {B.length b = 0 /\ B.recallable b}) = | false | null | false | [@@ inline_let ]let l = [] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Nil"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val output0:(b: B.buffer UInt8.t {B.length b = 0 /\ B.recallable b}) | [] | Test.Vectors.Aes128Gcm.output0 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 0 /\ LowStar.Monotonic.Buffer.recallable b} | {
"end_col": 38,
"end_line": 50,
"start_col": 2,
"start_line": 48
} |
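For records coming from test-vector modules like this one, the raw bytes can be recovered straight from the F* list literals (`0x58uy; 0xe2uy; ...`) embedded in `source_definition` or `file_context`. The sketch below uses only the standard library and assumes the vector is written as `0x..uy` byte literals, which holds for the definitions shown here but not necessarily for arbitrary records.

```python
import re

_BYTE = re.compile(r"0x([0-9a-fA-F]{2})uy")

def extract_bytes(fstar_text: str) -> bytes:
    """Collect every 0x..uy literal in the given F* text, in order."""
    return bytes(int(h, 16) for h in _BYTE.findall(fstar_text))

# Applied to the tag0 definition visible in file_context above, this yields
# b"\x58\xe2\xfc\xce\xfa\x7e\x30\x61\x36\x7f\x1d\x57\xa4\xe7\x45\x5a".
```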
Prims.Tot | val vectors_len:(x: UInt32.t{UInt32.v x = B.length vectors}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let vectors_len: (x:UInt32.t { UInt32.v x = B.length vectors }) =
4ul | val vectors_len:(x: UInt32.t{UInt32.v x = B.length vectors})
let vectors_len:(x: UInt32.t{UInt32.v x = B.length vectors}) = | false | null | false | 4ul | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"FStar.UInt32.__uint_to_t"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul
let input2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; 0x1auy; 0xafuy; 0xd2uy; 0x55uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
64ul
let tag2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4duy; 0x5cuy; 0x2auy; 0xf3uy; 0x27uy; 0xcduy; 0x64uy; 0xa6uy; 0x2cuy; 0xf3uy; 0x5auy; 0xbduy; 0x2buy; 0xa6uy; 0xfauy; 0xb4uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag2_len: (x:UInt32.t { UInt32.v x = B.length tag2 }) =
16ul
let output2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b }) =
[@inline_let] let l = [ 0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy; 0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy; 0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy; 0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy; 0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy; 0x47uy; 0x3fuy; 0x59uy; 0x85uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output2_len: (x:UInt32.t { UInt32.v x = B.length output2 }) =
64ul
let key3: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key3_len: (x:UInt32.t { UInt32.v x = B.length key3 }) =
16ul
let nonce3: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce3_len: (x:UInt32.t { UInt32.v x = B.length nonce3 }) =
12ul
let aad3: (b: B.buffer UInt8.t { B.length b = 20 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xabuy; 0xaduy; 0xdauy; 0xd2uy; ] in
assert_norm (List.Tot.length l = 20);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad3_len: (x:UInt32.t { UInt32.v x = B.length aad3 }) =
20ul
let input3: (b: B.buffer UInt8.t { B.length b = 60 /\ B.recallable b /\ B.disjoint b aad3 }) =
B.recall aad3;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; ] in
assert_norm (List.Tot.length l = 60);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input3_len: (x:UInt32.t { UInt32.v x = B.length input3 }) =
60ul
let tag3: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5buy; 0xc9uy; 0x4fuy; 0xbcuy; 0x32uy; 0x21uy; 0xa5uy; 0xdbuy; 0x94uy; 0xfauy; 0xe9uy; 0x5auy; 0xe7uy; 0x12uy; 0x1auy; 0x47uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag3_len: (x:UInt32.t { UInt32.v x = B.length tag3 }) =
16ul
let output3: (b: B.buffer UInt8.t { B.length b = 60 /\ B.recallable b }) =
[@inline_let] let l = [ 0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy; 0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy; 0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy; 0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy; 0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy; ] in
assert_norm (List.Tot.length l = 60);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output3_len: (x:UInt32.t { UInt32.v x = B.length output3 }) =
60ul
noeq
type vector = | Vector:
output: B.buffer UInt8.t { B.recallable output } ->
output_len: UInt32.t { B.length output = UInt32.v output_len } ->
tag: B.buffer UInt8.t { B.recallable tag } ->
tag_len: UInt32.t { B.length tag = UInt32.v tag_len } ->
input: B.buffer UInt8.t { B.recallable input } ->
input_len: UInt32.t { B.length input = UInt32.v input_len } ->
aad: B.buffer UInt8.t { B.recallable aad /\ B.disjoint input aad } ->
aad_len: UInt32.t { B.length aad = UInt32.v aad_len } ->
nonce: B.buffer UInt8.t { B.recallable nonce } ->
nonce_len: UInt32.t { B.length nonce = UInt32.v nonce_len } ->
key: B.buffer UInt8.t { B.recallable key } ->
key_len: UInt32.t { B.length key = UInt32.v key_len } ->
vector
let vectors: (b: B.buffer vector { B.length b = 4 /\ B.recallable b }) =
[@inline_let] let l = [
Vector output0 output0_len tag0 tag0_len input0 input0_len aad0 aad0_len nonce0 nonce0_len key0 key0_len ;
Vector output1 output1_len tag1 tag1_len input1 input1_len aad1 aad1_len nonce1 nonce1_len key1 key1_len ;
Vector output2 output2_len tag2 tag2_len input2 input2_len aad2 aad2_len nonce2 nonce2_len key2 key2_len ;
Vector output3 output3_len tag3 tag3_len input3 input3_len aad3 aad3_len nonce3 nonce3_len key3 key3_len ;
] in
assert_norm (List.Tot.length l = 4);
B.gcmalloc_of_list HyperStack.root l | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val vectors_len:(x: UInt32.t{UInt32.v x = B.length vectors}) | [] | Test.Vectors.Aes128Gcm.vectors_len | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Aes128Gcm.vectors} | {
"end_col": 5,
"end_line": 226,
"start_col": 2,
"start_line": 226
} |
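Each record also pins down exactly where its definition lives: the `source` dict gives the repository URL, commit, and file path, and `source_range` gives the line span. The sketch below turns those fields into a GitHub permalink; the key names match the records above, and the `#L..-L..` anchor is GitHub's standard line-range syntax.

```python
def github_permalink(row: dict) -> str:
    src = row["source"]        # {"file_name": ..., "git_rev": ..., "git_url": ..., "project_name": ...}
    rng = row["source_range"]  # {"start_line": ..., "end_line": ..., ...}
    repo = src["git_url"].removesuffix(".git")  # Python 3.9+
    return (f"{repo}/blob/{src['git_rev']}/{src['file_name']}"
            f"#L{rng['start_line']}-L{rng['end_line']}")

# For the vectors_len record above this gives
# https://github.com/hacl-star/hacl-star/blob/12c5e9539c7e3c366c26409d3b86493548c4483e/providers/test/vectors/Test.Vectors.Aes128Gcm.fst#L226-L226
```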
Prims.Tot | val input1:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | val input1:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1})
let input1:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1}) = | false | null | false | B.recall aad1;
[@@ inline_let ]let l =
[
0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy;
0x00uy; 0x00uy; 0x00uy; 0x00uy
]
in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"LowStar.Monotonic.Buffer.disjoint",
"Test.Vectors.Aes128Gcm.aad1",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil",
"LowStar.Monotonic.Buffer.recall"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val input1:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1}) | [] | Test.Vectors.Aes128Gcm.input1 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{ LowStar.Monotonic.Buffer.length b = 16 /\ LowStar.Monotonic.Buffer.recallable b /\
LowStar.Monotonic.Buffer.disjoint b Test.Vectors.Aes128Gcm.aad1 } | {
"end_col": 38,
"end_line": 82,
"start_col": 5,
"start_line": 80
} |
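The `dependencies` column records which `.checked` files the definition's module relies on, which is enough to build a module-level dependency map. A minimal sketch over an iterable of row dicts, with key names taken from the records above:

```python
from collections import defaultdict

def checked_file_graph(rows):
    """Map each checked file to the set of checked files it depends on."""
    graph = defaultdict(set)
    for row in rows:
        dep = row["dependencies"]
        graph[dep["checked_file"]].update(dep["dependencies"])
    return graph

# For the Aes128Gcm records above, every entry maps
# "Test.Vectors.Aes128Gcm.fst.checked" to prims.fst.checked, LowStar.Buffer.fst.checked, ...
```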
Prims.Tot | val aad1_len:(x: UInt32.t{UInt32.v x = B.length aad1}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul | val aad1_len:(x: UInt32.t{UInt32.v x = B.length aad1})
let aad1_len:(x: UInt32.t{UInt32.v x = B.length aad1}) = | false | null | false | 0ul | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"FStar.UInt32.__uint_to_t"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val aad1_len:(x: UInt32.t{UInt32.v x = B.length aad1}) | [] | Test.Vectors.Aes128Gcm.aad1_len | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Aes128Gcm.aad1} | {
"end_col": 5,
"end_line": 77,
"start_col": 2,
"start_line": 77
} |
Prims.Tot | val key3:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let key3: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | val key3:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b})
let key3:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) = | false | null | false | [@@ inline_let ]let l =
[
0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy;
0x67uy; 0x30uy; 0x83uy; 0x08uy
]
in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul
let input2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; 0x1auy; 0xafuy; 0xd2uy; 0x55uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
64ul
let tag2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4duy; 0x5cuy; 0x2auy; 0xf3uy; 0x27uy; 0xcduy; 0x64uy; 0xa6uy; 0x2cuy; 0xf3uy; 0x5auy; 0xbduy; 0x2buy; 0xa6uy; 0xfauy; 0xb4uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag2_len: (x:UInt32.t { UInt32.v x = B.length tag2 }) =
16ul
let output2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b }) =
[@inline_let] let l = [ 0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy; 0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy; 0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy; 0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy; 0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy; 0x47uy; 0x3fuy; 0x59uy; 0x85uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output2_len: (x:UInt32.t { UInt32.v x = B.length output2 }) =
64ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val key3:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) | [] | Test.Vectors.Aes128Gcm.key3 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 16 /\ LowStar.Monotonic.Buffer.recallable b} | {
"end_col": 38,
"end_line": 154,
"start_col": 2,
"start_line": 152
} |
Prims.Tot | val nonce0_len:(x: UInt32.t{UInt32.v x = B.length nonce0}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul | val nonce0_len:(x: UInt32.t{UInt32.v x = B.length nonce0})
let nonce0_len:(x: UInt32.t{UInt32.v x = B.length nonce0}) = | false | null | false | 12ul | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"FStar.UInt32.__uint_to_t"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val nonce0_len:(x: UInt32.t{UInt32.v x = B.length nonce0}) | [] | Test.Vectors.Aes128Gcm.nonce0_len | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Aes128Gcm.nonce0} | {
"end_col": 6,
"end_line": 21,
"start_col": 2,
"start_line": 21
} |
Prims.Tot | val key2_len:(x: UInt32.t{UInt32.v x = B.length key2}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul | val key2_len:(x: UInt32.t{UInt32.v x = B.length key2})
let key2_len:(x: UInt32.t{UInt32.v x = B.length key2}) = | false | null | false | 16ul | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"FStar.UInt32.__uint_to_t"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val key2_len:(x: UInt32.t{UInt32.v x = B.length key2}) | [] | Test.Vectors.Aes128Gcm.key2_len | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Aes128Gcm.key2} | {
"end_col": 6,
"end_line": 109,
"start_col": 2,
"start_line": 109
} |
Prims.Tot | val tag1_len:(x: UInt32.t{UInt32.v x = B.length tag1}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul | val tag1_len:(x: UInt32.t{UInt32.v x = B.length tag1})
let tag1_len:(x: UInt32.t{UInt32.v x = B.length tag1}) = | false | null | false | 16ul | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"FStar.UInt32.__uint_to_t"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tag1_len:(x: UInt32.t{UInt32.v x = B.length tag1}) | [] | Test.Vectors.Aes128Gcm.tag1_len | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Aes128Gcm.tag1} | {
"end_col": 6,
"end_line": 93,
"start_col": 2,
"start_line": 93
} |
Prims.Tot | val key2:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | val key2:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b})
let key2:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) = | false | null | false | [@@ inline_let ]let l =
[
0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy;
0x67uy; 0x30uy; 0x83uy; 0x08uy
]
in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val key2:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) | [] | Test.Vectors.Aes128Gcm.key2 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 16 /\ LowStar.Monotonic.Buffer.recallable b} | {
"end_col": 38,
"end_line": 106,
"start_col": 2,
"start_line": 104
} |
Prims.Tot | val aad2_len:(x: UInt32.t{UInt32.v x = B.length aad2}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul | val aad2_len:(x: UInt32.t{UInt32.v x = B.length aad2})
let aad2_len:(x: UInt32.t{UInt32.v x = B.length aad2}) = | false | null | false | 0ul | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"FStar.UInt32.__uint_to_t"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val aad2_len:(x: UInt32.t{UInt32.v x = B.length aad2}) | [] | Test.Vectors.Aes128Gcm.aad2_len | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Aes128Gcm.aad2} | {
"end_col": 5,
"end_line": 125,
"start_col": 2,
"start_line": 125
} |
Prims.Tot | val input0:(b: B.buffer UInt8.t {B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l | val input0:(b: B.buffer UInt8.t {B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0})
let input0:(b: B.buffer UInt8.t {B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0}) = | false | null | false | B.recall aad0;
[@@ inline_let ]let l = [] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"LowStar.Monotonic.Buffer.disjoint",
"Test.Vectors.Aes128Gcm.aad0",
"Prims.list",
"Prims.Nil",
"LowStar.Monotonic.Buffer.recall"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val input0:(b: B.buffer UInt8.t {B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0}) | [] | Test.Vectors.Aes128Gcm.input0 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{ LowStar.Monotonic.Buffer.length b = 0 /\ LowStar.Monotonic.Buffer.recallable b /\
LowStar.Monotonic.Buffer.disjoint b Test.Vectors.Aes128Gcm.aad0 } | {
"end_col": 38,
"end_line": 34,
"start_col": 5,
"start_line": 32
} |
Prims.Tot | val tag2:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tag2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4duy; 0x5cuy; 0x2auy; 0xf3uy; 0x27uy; 0xcduy; 0x64uy; 0xa6uy; 0x2cuy; 0xf3uy; 0x5auy; 0xbduy; 0x2buy; 0xa6uy; 0xfauy; 0xb4uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | val tag2:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b})
let tag2:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) = | false | null | false | [@@ inline_let ]let l =
[
0x4duy; 0x5cuy; 0x2auy; 0xf3uy; 0x27uy; 0xcduy; 0x64uy; 0xa6uy; 0x2cuy; 0xf3uy; 0x5auy; 0xbduy;
0x2buy; 0xa6uy; 0xfauy; 0xb4uy
]
in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul
let input2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; 0x1auy; 0xafuy; 0xd2uy; 0x55uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
64ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tag2:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) | [] | Test.Vectors.Aes128Gcm.tag2 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 16 /\ LowStar.Monotonic.Buffer.recallable b} | {
"end_col": 38,
"end_line": 138,
"start_col": 2,
"start_line": 136
} |
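The rows before and after this point all record one and the same Low* definition shape for byte vectors. As a reading aid, the sketch below restates that shape under the same module context as the file contexts shown in these rows (B abbreviating LowStar.Buffer); the name `example_bytes` is hypothetical and occurs nowhere in the dataset or in hacl-star. The refinement pins the byte count and recallability, `assert_norm` discharges the length equation by normalization, and `B.gcmalloc_of_list` allocates the literal list as a global buffer in `HyperStack.root` that can later be brought back into scope with `B.recall`.

(* Hypothetical illustration only; `example_bytes` is not a dataset entry. *)
let example_bytes: (b: B.buffer UInt8.t { B.length b = 4 /\ B.recallable b }) =
  [@inline_let] let l = [ 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; ] in
  assert_norm (List.Tot.length l = 4);
  B.gcmalloc_of_list HyperStack.root l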
Prims.Tot | val key1:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | val key1:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b})
let key1:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) = | false | null | false | [@@ inline_let ]let l =
[
0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy;
0x00uy; 0x00uy; 0x00uy; 0x00uy
]
in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val key1:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) | [] | Test.Vectors.Aes128Gcm.key1 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 16 /\ LowStar.Monotonic.Buffer.recallable b} | {
"end_col": 38,
"end_line": 58,
"start_col": 2,
"start_line": 56
} |
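Each row also carries the solver configuration under which its definition was checked: the `vconfig` object above records, among other settings, initial_fuel 2, initial_ifuel 1, max_fuel 0, max_ifuel 0, z3rlimit 5 and Z3 version 4.8.5. Only for orientation, an approximate source-level rendering of those limits as an F* options pragma would be the following; it is not taken from the file itself.

(* Approximate rendering of the recorded vconfig limits; illustrative only. *)
#set-options "--initial_fuel 2 --initial_ifuel 1 --max_fuel 0 --max_ifuel 0 --z3rlimit 5"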
Prims.Tot | val input3_len:(x: UInt32.t{UInt32.v x = B.length input3}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let input3_len: (x:UInt32.t { UInt32.v x = B.length input3 }) =
60ul | val input3_len:(x: UInt32.t{UInt32.v x = B.length input3})
let input3_len:(x: UInt32.t{UInt32.v x = B.length input3}) = | false | null | false | 60ul | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"FStar.UInt32.__uint_to_t"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul
let input2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; 0x1auy; 0xafuy; 0xd2uy; 0x55uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
64ul
let tag2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4duy; 0x5cuy; 0x2auy; 0xf3uy; 0x27uy; 0xcduy; 0x64uy; 0xa6uy; 0x2cuy; 0xf3uy; 0x5auy; 0xbduy; 0x2buy; 0xa6uy; 0xfauy; 0xb4uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag2_len: (x:UInt32.t { UInt32.v x = B.length tag2 }) =
16ul
let output2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b }) =
[@inline_let] let l = [ 0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy; 0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy; 0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy; 0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy; 0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy; 0x47uy; 0x3fuy; 0x59uy; 0x85uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output2_len: (x:UInt32.t { UInt32.v x = B.length output2 }) =
64ul
let key3: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key3_len: (x:UInt32.t { UInt32.v x = B.length key3 }) =
16ul
let nonce3: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce3_len: (x:UInt32.t { UInt32.v x = B.length nonce3 }) =
12ul
let aad3: (b: B.buffer UInt8.t { B.length b = 20 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xabuy; 0xaduy; 0xdauy; 0xd2uy; ] in
assert_norm (List.Tot.length l = 20);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad3_len: (x:UInt32.t { UInt32.v x = B.length aad3 }) =
20ul
let input3: (b: B.buffer UInt8.t { B.length b = 60 /\ B.recallable b /\ B.disjoint b aad3 }) =
B.recall aad3;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; ] in
assert_norm (List.Tot.length l = 60);
B.gcmalloc_of_list HyperStack.root l | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val input3_len:(x: UInt32.t{UInt32.v x = B.length input3}) | [] | Test.Vectors.Aes128Gcm.input3_len | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Aes128Gcm.input3} | {
"end_col": 6,
"end_line": 181,
"start_col": 2,
"start_line": 181
} |
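The `*_len` rows, like the `input3_len` row ending here, follow a second, smaller shape: a machine-integer constant whose refinement ties its value to the length of the corresponding buffer, marked `inline_for_extraction` so the constant is inlined away during extraction. Illustrated with the hypothetical names from the earlier sketch:

(* Hypothetical companion to the `example_bytes` sketch above. *)
inline_for_extraction let example_bytes_len: (x:UInt32.t { UInt32.v x = B.length example_bytes }) =
  4ul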
Prims.Tot | val aad3_len:(x: UInt32.t{UInt32.v x = B.length aad3}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let aad3_len: (x:UInt32.t { UInt32.v x = B.length aad3 }) =
20ul | val aad3_len:(x: UInt32.t{UInt32.v x = B.length aad3})
let aad3_len:(x: UInt32.t{UInt32.v x = B.length aad3}) = | false | null | false | 20ul | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"FStar.UInt32.__uint_to_t"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul
let input2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; 0x1auy; 0xafuy; 0xd2uy; 0x55uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
64ul
let tag2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4duy; 0x5cuy; 0x2auy; 0xf3uy; 0x27uy; 0xcduy; 0x64uy; 0xa6uy; 0x2cuy; 0xf3uy; 0x5auy; 0xbduy; 0x2buy; 0xa6uy; 0xfauy; 0xb4uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag2_len: (x:UInt32.t { UInt32.v x = B.length tag2 }) =
16ul
let output2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b }) =
[@inline_let] let l = [ 0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy; 0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy; 0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy; 0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy; 0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy; 0x47uy; 0x3fuy; 0x59uy; 0x85uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output2_len: (x:UInt32.t { UInt32.v x = B.length output2 }) =
64ul
let key3: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key3_len: (x:UInt32.t { UInt32.v x = B.length key3 }) =
16ul
let nonce3: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce3_len: (x:UInt32.t { UInt32.v x = B.length nonce3 }) =
12ul
let aad3: (b: B.buffer UInt8.t { B.length b = 20 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xabuy; 0xaduy; 0xdauy; 0xd2uy; ] in
assert_norm (List.Tot.length l = 20);
B.gcmalloc_of_list HyperStack.root l | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val aad3_len:(x: UInt32.t{UInt32.v x = B.length aad3}) | [] | Test.Vectors.Aes128Gcm.aad3_len | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: FStar.UInt32.t{FStar.UInt32.v x = LowStar.Monotonic.Buffer.length Test.Vectors.Aes128Gcm.aad3} | {
"end_col": 6,
"end_line": 173,
"start_col": 2,
"start_line": 173
} |
Prims.Tot | val aad3:(b: B.buffer UInt8.t {B.length b = 20 /\ B.recallable b}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let aad3: (b: B.buffer UInt8.t { B.length b = 20 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xabuy; 0xaduy; 0xdauy; 0xd2uy; ] in
assert_norm (List.Tot.length l = 20);
B.gcmalloc_of_list HyperStack.root l | val aad3:(b: B.buffer UInt8.t {B.length b = 20 /\ B.recallable b})
let aad3:(b: B.buffer UInt8.t {B.length b = 20 /\ B.recallable b}) = | false | null | false | [@@ inline_let ]let l =
[
0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xfeuy; 0xeduy; 0xfauy; 0xceuy;
0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xabuy; 0xaduy; 0xdauy; 0xd2uy
]
in
assert_norm (List.Tot.length l = 20);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul
let input2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; 0x1auy; 0xafuy; 0xd2uy; 0x55uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
64ul
let tag2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4duy; 0x5cuy; 0x2auy; 0xf3uy; 0x27uy; 0xcduy; 0x64uy; 0xa6uy; 0x2cuy; 0xf3uy; 0x5auy; 0xbduy; 0x2buy; 0xa6uy; 0xfauy; 0xb4uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag2_len: (x:UInt32.t { UInt32.v x = B.length tag2 }) =
16ul
let output2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b }) =
[@inline_let] let l = [ 0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy; 0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy; 0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy; 0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy; 0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy; 0x47uy; 0x3fuy; 0x59uy; 0x85uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output2_len: (x:UInt32.t { UInt32.v x = B.length output2 }) =
64ul
let key3: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key3_len: (x:UInt32.t { UInt32.v x = B.length key3 }) =
16ul
let nonce3: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce3_len: (x:UInt32.t { UInt32.v x = B.length nonce3 }) =
12ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val aad3:(b: B.buffer UInt8.t {B.length b = 20 /\ B.recallable b}) | [] | Test.Vectors.Aes128Gcm.aad3 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 20 /\ LowStar.Monotonic.Buffer.recallable b} | {
"end_col": 38,
"end_line": 170,
"start_col": 2,
"start_line": 168
} |
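Plaintext vectors (`input1`, `input2`, `input3` in the file contexts of these rows) add one more ingredient: a `B.disjoint` clause against the matching aad buffer, which is why those definitions first `B.recall` the aad buffer so the freshly allocated buffer can be proven disjoint from it. Sketched once more with the hypothetical names used above:

(* Hypothetical; mirrors the input1/input2/input3 pattern in the file contexts. *)
let example_input: (b: B.buffer UInt8.t { B.length b = 4 /\ B.recallable b /\ B.disjoint b example_bytes }) =
  B.recall example_bytes;[@inline_let] let l = [ 0x00uy; 0x01uy; 0x02uy; 0x03uy; ] in
  assert_norm (List.Tot.length l = 4);
  B.gcmalloc_of_list HyperStack.root l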
Prims.Tot | val output1:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | val output1:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b})
let output1:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) = | false | null | false | [@@ inline_let ]let l =
[
0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy;
0x71uy; 0xb2uy; 0xfeuy; 0x78uy
]
in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val output1:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) | [] | Test.Vectors.Aes128Gcm.output1 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 16 /\ LowStar.Monotonic.Buffer.recallable b} | {
"end_col": 38,
"end_line": 98,
"start_col": 2,
"start_line": 96
} |
Prims.Tot | val nonce2:(b: B.buffer UInt8.t {B.length b = 12 /\ B.recallable b}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l | val nonce2:(b: B.buffer UInt8.t {B.length b = 12 /\ B.recallable b})
let nonce2:(b: B.buffer UInt8.t {B.length b = 12 /\ B.recallable b}) = | false | null | false | [@@ inline_let ]let l =
[0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy]
in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val nonce2:(b: B.buffer UInt8.t {B.length b = 12 /\ B.recallable b}) | [] | Test.Vectors.Aes128Gcm.nonce2 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 12 /\ LowStar.Monotonic.Buffer.recallable b} | {
"end_col": 38,
"end_line": 114,
"start_col": 2,
"start_line": 112
} |
Prims.Tot | val output2:(b: B.buffer UInt8.t {B.length b = 64 /\ B.recallable b}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let output2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b }) =
[@inline_let] let l = [ 0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy; 0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy; 0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy; 0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy; 0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy; 0x47uy; 0x3fuy; 0x59uy; 0x85uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l | val output2:(b: B.buffer UInt8.t {B.length b = 64 /\ B.recallable b})
let output2:(b: B.buffer UInt8.t {B.length b = 64 /\ B.recallable b}) = | false | null | false | [@@ inline_let ]let l =
[
0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy;
0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy;
0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy;
0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy;
0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy;
0x47uy; 0x3fuy; 0x59uy; 0x85uy
]
in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul
let input2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; 0x1auy; 0xafuy; 0xd2uy; 0x55uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
64ul
let tag2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4duy; 0x5cuy; 0x2auy; 0xf3uy; 0x27uy; 0xcduy; 0x64uy; 0xa6uy; 0x2cuy; 0xf3uy; 0x5auy; 0xbduy; 0x2buy; 0xa6uy; 0xfauy; 0xb4uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag2_len: (x:UInt32.t { UInt32.v x = B.length tag2 }) =
16ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val output2:(b: B.buffer UInt8.t {B.length b = 64 /\ B.recallable b}) | [] | Test.Vectors.Aes128Gcm.output2 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 64 /\ LowStar.Monotonic.Buffer.recallable b} | {
"end_col": 38,
"end_line": 146,
"start_col": 2,
"start_line": 144
} |
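Each source_definition in the records above follows the same Low* recipe: the test-vector byte string becomes a top-level recallable buffer allocated from a literal list in the root region, paired with an inline_for_extraction UInt32 constant whose refinement ties it to the buffer's length. A minimal sketch of that shape, with a hypothetical name key_example and illustrative bytes that are not part of the HACL* vectors:

(* Sketch only: mirrors the pattern of key0..key3 and output0..output3. *)
let key_example: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
  (* the literal list is inlined so assert_norm can compute its length *)
  [@inline_let] let l = [ 0x00uy; 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x05uy; 0x06uy; 0x07uy; 0x08uy; 0x09uy; 0x0auy; 0x0buy; 0x0cuy; 0x0duy; 0x0euy; 0x0fuy; ] in
  assert_norm (List.Tot.length l = 16);
  (* global, recallable allocation in the root region *)
  B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key_example_len: (x:UInt32.t { UInt32.v x = B.length key_example }) =
  16ul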
Prims.Tot | val tag3:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tag3: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5buy; 0xc9uy; 0x4fuy; 0xbcuy; 0x32uy; 0x21uy; 0xa5uy; 0xdbuy; 0x94uy; 0xfauy; 0xe9uy; 0x5auy; 0xe7uy; 0x12uy; 0x1auy; 0x47uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | val tag3:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b})
let tag3:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) = | false | null | false | [@@ inline_let ]let l =
[
0x5buy; 0xc9uy; 0x4fuy; 0xbcuy; 0x32uy; 0x21uy; 0xa5uy; 0xdbuy; 0x94uy; 0xfauy; 0xe9uy; 0x5auy;
0xe7uy; 0x12uy; 0x1auy; 0x47uy
]
in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul
let input2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; 0x1auy; 0xafuy; 0xd2uy; 0x55uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
64ul
let tag2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4duy; 0x5cuy; 0x2auy; 0xf3uy; 0x27uy; 0xcduy; 0x64uy; 0xa6uy; 0x2cuy; 0xf3uy; 0x5auy; 0xbduy; 0x2buy; 0xa6uy; 0xfauy; 0xb4uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag2_len: (x:UInt32.t { UInt32.v x = B.length tag2 }) =
16ul
let output2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b }) =
[@inline_let] let l = [ 0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy; 0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy; 0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy; 0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy; 0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy; 0x47uy; 0x3fuy; 0x59uy; 0x85uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output2_len: (x:UInt32.t { UInt32.v x = B.length output2 }) =
64ul
let key3: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key3_len: (x:UInt32.t { UInt32.v x = B.length key3 }) =
16ul
let nonce3: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce3_len: (x:UInt32.t { UInt32.v x = B.length nonce3 }) =
12ul
let aad3: (b: B.buffer UInt8.t { B.length b = 20 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xabuy; 0xaduy; 0xdauy; 0xd2uy; ] in
assert_norm (List.Tot.length l = 20);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad3_len: (x:UInt32.t { UInt32.v x = B.length aad3 }) =
20ul
let input3: (b: B.buffer UInt8.t { B.length b = 60 /\ B.recallable b /\ B.disjoint b aad3 }) =
B.recall aad3;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; ] in
assert_norm (List.Tot.length l = 60);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input3_len: (x:UInt32.t { UInt32.v x = B.length input3 }) =
60ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tag3:(b: B.buffer UInt8.t {B.length b = 16 /\ B.recallable b}) | [] | Test.Vectors.Aes128Gcm.tag3 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 16 /\ LowStar.Monotonic.Buffer.recallable b} | {
"end_col": 38,
"end_line": 186,
"start_col": 2,
"start_line": 184
} |
Prims.Tot | val output3:(b: B.buffer UInt8.t {B.length b = 60 /\ B.recallable b}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let output3: (b: B.buffer UInt8.t { B.length b = 60 /\ B.recallable b }) =
[@inline_let] let l = [ 0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy; 0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy; 0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy; 0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy; 0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy; ] in
assert_norm (List.Tot.length l = 60);
B.gcmalloc_of_list HyperStack.root l | val output3:(b: B.buffer UInt8.t {B.length b = 60 /\ B.recallable b})
let output3:(b: B.buffer UInt8.t {B.length b = 60 /\ B.recallable b}) = | false | null | false | [@@ inline_let ]let l =
[
0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy;
0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy;
0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy;
0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy;
0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy
]
in
assert_norm (List.Tot.length l = 60);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul
let input2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; 0x1auy; 0xafuy; 0xd2uy; 0x55uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
64ul
let tag2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4duy; 0x5cuy; 0x2auy; 0xf3uy; 0x27uy; 0xcduy; 0x64uy; 0xa6uy; 0x2cuy; 0xf3uy; 0x5auy; 0xbduy; 0x2buy; 0xa6uy; 0xfauy; 0xb4uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag2_len: (x:UInt32.t { UInt32.v x = B.length tag2 }) =
16ul
let output2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b }) =
[@inline_let] let l = [ 0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy; 0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy; 0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy; 0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy; 0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy; 0x47uy; 0x3fuy; 0x59uy; 0x85uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output2_len: (x:UInt32.t { UInt32.v x = B.length output2 }) =
64ul
let key3: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key3_len: (x:UInt32.t { UInt32.v x = B.length key3 }) =
16ul
let nonce3: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce3_len: (x:UInt32.t { UInt32.v x = B.length nonce3 }) =
12ul
let aad3: (b: B.buffer UInt8.t { B.length b = 20 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xabuy; 0xaduy; 0xdauy; 0xd2uy; ] in
assert_norm (List.Tot.length l = 20);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad3_len: (x:UInt32.t { UInt32.v x = B.length aad3 }) =
20ul
let input3: (b: B.buffer UInt8.t { B.length b = 60 /\ B.recallable b /\ B.disjoint b aad3 }) =
B.recall aad3;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; ] in
assert_norm (List.Tot.length l = 60);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input3_len: (x:UInt32.t { UInt32.v x = B.length input3 }) =
60ul
let tag3: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5buy; 0xc9uy; 0x4fuy; 0xbcuy; 0x32uy; 0x21uy; 0xa5uy; 0xdbuy; 0x94uy; 0xfauy; 0xe9uy; 0x5auy; 0xe7uy; 0x12uy; 0x1auy; 0x47uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag3_len: (x:UInt32.t { UInt32.v x = B.length tag3 }) =
16ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val output3:(b: B.buffer UInt8.t {B.length b = 60 /\ B.recallable b}) | [] | Test.Vectors.Aes128Gcm.output3 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{LowStar.Monotonic.Buffer.length b = 60 /\ LowStar.Monotonic.Buffer.recallable b} | {
"end_col": 38,
"end_line": 194,
"start_col": 2,
"start_line": 192
} |
Prims.Tot | val input2:(b: B.buffer UInt8.t {B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let input2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; 0x1auy; 0xafuy; 0xd2uy; 0x55uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l | val input2:(b: B.buffer UInt8.t {B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2})
let input2:(b: B.buffer UInt8.t {B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2}) = | false | null | false | B.recall aad2;
[@@ inline_let ]let l =
[
0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy;
0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy;
0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy;
0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy;
0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy;
0x1auy; 0xafuy; 0xd2uy; 0x55uy
]
in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"LowStar.Monotonic.Buffer.disjoint",
"Test.Vectors.Aes128Gcm.aad2",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil",
"LowStar.Monotonic.Buffer.recall"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val input2:(b: B.buffer UInt8.t {B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2}) | [] | Test.Vectors.Aes128Gcm.input2 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{ LowStar.Monotonic.Buffer.length b = 64 /\ LowStar.Monotonic.Buffer.recallable b /\
LowStar.Monotonic.Buffer.disjoint b Test.Vectors.Aes128Gcm.aad2 } | {
"end_col": 38,
"end_line": 130,
"start_col": 5,
"start_line": 128
} |
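The input buffers in these records are allocated only after a B.recall of the corresponding aad buffer; recalling it first appears to be what lets the freshness of the new allocation discharge the B.disjoint refinement in the input's type. A minimal sketch of that pairing, with hypothetical names aad_example and input_example and illustrative bytes:

let aad_example: (b: B.buffer UInt8.t { B.length b = 4 /\ B.recallable b }) =
  [@inline_let] let l = [ 0x01uy; 0x02uy; 0x03uy; 0x04uy; ] in
  assert_norm (List.Tot.length l = 4);
  B.gcmalloc_of_list HyperStack.root l
let input_example: (b: B.buffer UInt8.t { B.length b = 4 /\ B.recallable b /\ B.disjoint b aad_example }) =
  (* recall makes aad_example live, so the fresh allocation below is provably disjoint from it *)
  B.recall aad_example;
  [@inline_let] let l = [ 0xaauy; 0xbbuy; 0xccuy; 0xdduy; ] in
  assert_norm (List.Tot.length l = 4);
  B.gcmalloc_of_list HyperStack.root l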
Prims.Tot | val vectors:(b: B.buffer vector {B.length b = 4 /\ B.recallable b}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let vectors: (b: B.buffer vector { B.length b = 4 /\ B.recallable b }) =
[@inline_let] let l = [
Vector output0 output0_len tag0 tag0_len input0 input0_len aad0 aad0_len nonce0 nonce0_len key0 key0_len ;
Vector output1 output1_len tag1 tag1_len input1 input1_len aad1 aad1_len nonce1 nonce1_len key1 key1_len ;
Vector output2 output2_len tag2 tag2_len input2 input2_len aad2 aad2_len nonce2 nonce2_len key2 key2_len ;
Vector output3 output3_len tag3 tag3_len input3 input3_len aad3 aad3_len nonce3 nonce3_len key3 key3_len ;
] in
assert_norm (List.Tot.length l = 4);
B.gcmalloc_of_list HyperStack.root l | val vectors:(b: B.buffer vector {B.length b = 4 /\ B.recallable b})
let vectors:(b: B.buffer vector {B.length b = 4 /\ B.recallable b}) = | false | null | false | [@@ inline_let ]let l =
[
Vector output0
output0_len
tag0
tag0_len
input0
input0_len
aad0
aad0_len
nonce0
nonce0_len
key0
key0_len;
Vector output1
output1_len
tag1
tag1_len
input1
input1_len
aad1
aad1_len
nonce1
nonce1_len
key1
key1_len;
Vector output2
output2_len
tag2
tag2_len
input2
input2_len
aad2
aad2_len
nonce2
nonce2_len
key2
key2_len;
Vector output3
output3_len
tag3
tag3_len
input3
input3_len
aad3
aad3_len
nonce3
nonce3_len
key3
key3_len
]
in
assert_norm (List.Tot.length l = 4);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"Test.Vectors.Aes128Gcm.vector",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"Prims.list",
"Prims.Cons",
"Test.Vectors.Aes128Gcm.Vector",
"Test.Vectors.Aes128Gcm.output0",
"Test.Vectors.Aes128Gcm.output0_len",
"Test.Vectors.Aes128Gcm.tag0",
"Test.Vectors.Aes128Gcm.tag0_len",
"Test.Vectors.Aes128Gcm.input0",
"Test.Vectors.Aes128Gcm.input0_len",
"Test.Vectors.Aes128Gcm.aad0",
"Test.Vectors.Aes128Gcm.aad0_len",
"Test.Vectors.Aes128Gcm.nonce0",
"Test.Vectors.Aes128Gcm.nonce0_len",
"Test.Vectors.Aes128Gcm.key0",
"Test.Vectors.Aes128Gcm.key0_len",
"Test.Vectors.Aes128Gcm.output1",
"Test.Vectors.Aes128Gcm.output1_len",
"Test.Vectors.Aes128Gcm.tag1",
"Test.Vectors.Aes128Gcm.tag1_len",
"Test.Vectors.Aes128Gcm.input1",
"Test.Vectors.Aes128Gcm.input1_len",
"Test.Vectors.Aes128Gcm.aad1",
"Test.Vectors.Aes128Gcm.aad1_len",
"Test.Vectors.Aes128Gcm.nonce1",
"Test.Vectors.Aes128Gcm.nonce1_len",
"Test.Vectors.Aes128Gcm.key1",
"Test.Vectors.Aes128Gcm.key1_len",
"Test.Vectors.Aes128Gcm.output2",
"Test.Vectors.Aes128Gcm.output2_len",
"Test.Vectors.Aes128Gcm.tag2",
"Test.Vectors.Aes128Gcm.tag2_len",
"Test.Vectors.Aes128Gcm.input2",
"Test.Vectors.Aes128Gcm.input2_len",
"Test.Vectors.Aes128Gcm.aad2",
"Test.Vectors.Aes128Gcm.aad2_len",
"Test.Vectors.Aes128Gcm.nonce2",
"Test.Vectors.Aes128Gcm.nonce2_len",
"Test.Vectors.Aes128Gcm.key2",
"Test.Vectors.Aes128Gcm.key2_len",
"Test.Vectors.Aes128Gcm.output3",
"Test.Vectors.Aes128Gcm.output3_len",
"Test.Vectors.Aes128Gcm.tag3",
"Test.Vectors.Aes128Gcm.tag3_len",
"Test.Vectors.Aes128Gcm.input3",
"Test.Vectors.Aes128Gcm.input3_len",
"Test.Vectors.Aes128Gcm.aad3",
"Test.Vectors.Aes128Gcm.aad3_len",
"Test.Vectors.Aes128Gcm.nonce3",
"Test.Vectors.Aes128Gcm.nonce3_len",
"Test.Vectors.Aes128Gcm.key3",
"Test.Vectors.Aes128Gcm.key3_len",
"Prims.Nil"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul
let input2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; 0x1auy; 0xafuy; 0xd2uy; 0x55uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
64ul
let tag2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4duy; 0x5cuy; 0x2auy; 0xf3uy; 0x27uy; 0xcduy; 0x64uy; 0xa6uy; 0x2cuy; 0xf3uy; 0x5auy; 0xbduy; 0x2buy; 0xa6uy; 0xfauy; 0xb4uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag2_len: (x:UInt32.t { UInt32.v x = B.length tag2 }) =
16ul
let output2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b }) =
[@inline_let] let l = [ 0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy; 0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy; 0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy; 0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy; 0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy; 0x47uy; 0x3fuy; 0x59uy; 0x85uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output2_len: (x:UInt32.t { UInt32.v x = B.length output2 }) =
64ul
let key3: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key3_len: (x:UInt32.t { UInt32.v x = B.length key3 }) =
16ul
let nonce3: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce3_len: (x:UInt32.t { UInt32.v x = B.length nonce3 }) =
12ul
let aad3: (b: B.buffer UInt8.t { B.length b = 20 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xabuy; 0xaduy; 0xdauy; 0xd2uy; ] in
assert_norm (List.Tot.length l = 20);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad3_len: (x:UInt32.t { UInt32.v x = B.length aad3 }) =
20ul
let input3: (b: B.buffer UInt8.t { B.length b = 60 /\ B.recallable b /\ B.disjoint b aad3 }) =
B.recall aad3;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; ] in
assert_norm (List.Tot.length l = 60);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input3_len: (x:UInt32.t { UInt32.v x = B.length input3 }) =
60ul
let tag3: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5buy; 0xc9uy; 0x4fuy; 0xbcuy; 0x32uy; 0x21uy; 0xa5uy; 0xdbuy; 0x94uy; 0xfauy; 0xe9uy; 0x5auy; 0xe7uy; 0x12uy; 0x1auy; 0x47uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag3_len: (x:UInt32.t { UInt32.v x = B.length tag3 }) =
16ul
let output3: (b: B.buffer UInt8.t { B.length b = 60 /\ B.recallable b }) =
[@inline_let] let l = [ 0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy; 0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy; 0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy; 0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy; 0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy; ] in
assert_norm (List.Tot.length l = 60);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output3_len: (x:UInt32.t { UInt32.v x = B.length output3 }) =
60ul
noeq
type vector = | Vector:
output: B.buffer UInt8.t { B.recallable output } ->
output_len: UInt32.t { B.length output = UInt32.v output_len } ->
tag: B.buffer UInt8.t { B.recallable tag } ->
tag_len: UInt32.t { B.length tag = UInt32.v tag_len } ->
input: B.buffer UInt8.t { B.recallable input } ->
input_len: UInt32.t { B.length input = UInt32.v input_len } ->
aad: B.buffer UInt8.t { B.recallable aad /\ B.disjoint input aad } ->
aad_len: UInt32.t { B.length aad = UInt32.v aad_len } ->
nonce: B.buffer UInt8.t { B.recallable nonce } ->
nonce_len: UInt32.t { B.length nonce = UInt32.v nonce_len } ->
key: B.buffer UInt8.t { B.recallable key } ->
key_len: UInt32.t { B.length key = UInt32.v key_len } ->
vector | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val vectors:(b: B.buffer vector {B.length b = 4 /\ B.recallable b}) | [] | Test.Vectors.Aes128Gcm.vectors | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer Test.Vectors.Aes128Gcm.vector
{LowStar.Monotonic.Buffer.length b = 4 /\ LowStar.Monotonic.Buffer.recallable b} | {
"end_col": 38,
"end_line": 223,
"start_col": 2,
"start_line": 216
} |
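The byte lists carried in the record above (key2, nonce2, input2, output2, tag2) appear to be the familiar AES-128-GCM vectors from the original GCM specification (test case 3), so they can be cross-checked outside F*. A minimal Python sketch of such a check follows; the third-party `cryptography` package and the `AESGCM` helper are assumptions of the sketch and are not part of the dataset or of HACL*.

```python
# Cross-check key2/nonce2/input2 against output2/tag2 from the record above.
# Assumes the third-party `cryptography` package is installed.
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key   = bytes.fromhex("feffe9928665731c6d6a8f9467308308")   # key2
nonce = bytes.fromhex("cafebabefacedbaddecaf888")           # nonce2
plaintext = bytes.fromhex(                                  # input2 (64 bytes)
    "d9313225f88406e5a55909c5aff5269a"
    "86a7a9531534f7da2e4c303d8a318a72"
    "1c3c0c95956809532fcf0e2449a6b525"
    "b16aedf5aa0de657ba637b391aafd255")

# AESGCM.encrypt returns the ciphertext with the 16-byte tag appended.
ct_and_tag = AESGCM(key).encrypt(nonce, plaintext, None)
ciphertext, tag = ct_and_tag[:-16], ct_and_tag[-16:]

assert ciphertext[:16].hex() == "42831ec2217774244b7221b784d0d49c"  # output2[0:16]
assert tag.hex() == "4d5c2af327cd64a62cf35abd2ba6fab4"              # tag2
```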
Prims.Tot | val input3:(b: B.buffer UInt8.t {B.length b = 60 /\ B.recallable b /\ B.disjoint b aad3}) | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let input3: (b: B.buffer UInt8.t { B.length b = 60 /\ B.recallable b /\ B.disjoint b aad3 }) =
B.recall aad3;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; ] in
assert_norm (List.Tot.length l = 60);
B.gcmalloc_of_list HyperStack.root l | val input3:(b: B.buffer UInt8.t {B.length b = 60 /\ B.recallable b /\ B.disjoint b aad3})
let input3:(b: B.buffer UInt8.t {B.length b = 60 /\ B.recallable b /\ B.disjoint b aad3}) = | false | null | false | B.recall aad3;
[@@ inline_let ]let l =
[
0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy;
0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy;
0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy;
0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy;
0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy
]
in
assert_norm (List.Tot.length l = 60);
B.gcmalloc_of_list HyperStack.root l | {
"checked_file": "Test.Vectors.Aes128Gcm.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Aes128Gcm.fst"
} | [
"total"
] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"LowStar.Monotonic.Buffer.disjoint",
"Test.Vectors.Aes128Gcm.aad3",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil",
"LowStar.Monotonic.Buffer.recall"
] | [] | module Test.Vectors.Aes128Gcm
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
16ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
0ul
let input0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
0ul
let tag0: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x58uy; 0xe2uy; 0xfcuy; 0xceuy; 0xfauy; 0x7euy; 0x30uy; 0x61uy; 0x36uy; 0x7fuy; 0x1duy; 0x57uy; 0xa4uy; 0xe7uy; 0x45uy; 0x5auy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag0_len: (x:UInt32.t { UInt32.v x = B.length tag0 }) =
16ul
let output0: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
0ul
let key1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
16ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
16ul
let tag1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xabuy; 0x6euy; 0x47uy; 0xd4uy; 0x2cuy; 0xecuy; 0x13uy; 0xbduy; 0xf5uy; 0x3auy; 0x67uy; 0xb2uy; 0x12uy; 0x57uy; 0xbduy; 0xdfuy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag1_len: (x:UInt32.t { UInt32.v x = B.length tag1 }) =
16ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x03uy; 0x88uy; 0xdauy; 0xceuy; 0x60uy; 0xb6uy; 0xa3uy; 0x92uy; 0xf3uy; 0x28uy; 0xc2uy; 0xb9uy; 0x71uy; 0xb2uy; 0xfeuy; 0x78uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
16ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
0ul
let input2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ 0xd9uy; 0x31uy; 0x32uy; 0x25uy; 0xf8uy; 0x84uy; 0x06uy; 0xe5uy; 0xa5uy; 0x59uy; 0x09uy; 0xc5uy; 0xafuy; 0xf5uy; 0x26uy; 0x9auy; 0x86uy; 0xa7uy; 0xa9uy; 0x53uy; 0x15uy; 0x34uy; 0xf7uy; 0xdauy; 0x2euy; 0x4cuy; 0x30uy; 0x3duy; 0x8auy; 0x31uy; 0x8auy; 0x72uy; 0x1cuy; 0x3cuy; 0x0cuy; 0x95uy; 0x95uy; 0x68uy; 0x09uy; 0x53uy; 0x2fuy; 0xcfuy; 0x0euy; 0x24uy; 0x49uy; 0xa6uy; 0xb5uy; 0x25uy; 0xb1uy; 0x6auy; 0xeduy; 0xf5uy; 0xaauy; 0x0duy; 0xe6uy; 0x57uy; 0xbauy; 0x63uy; 0x7buy; 0x39uy; 0x1auy; 0xafuy; 0xd2uy; 0x55uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
64ul
let tag2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4duy; 0x5cuy; 0x2auy; 0xf3uy; 0x27uy; 0xcduy; 0x64uy; 0xa6uy; 0x2cuy; 0xf3uy; 0x5auy; 0xbduy; 0x2buy; 0xa6uy; 0xfauy; 0xb4uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let tag2_len: (x:UInt32.t { UInt32.v x = B.length tag2 }) =
16ul
let output2: (b: B.buffer UInt8.t { B.length b = 64 /\ B.recallable b }) =
[@inline_let] let l = [ 0x42uy; 0x83uy; 0x1euy; 0xc2uy; 0x21uy; 0x77uy; 0x74uy; 0x24uy; 0x4buy; 0x72uy; 0x21uy; 0xb7uy; 0x84uy; 0xd0uy; 0xd4uy; 0x9cuy; 0xe3uy; 0xaauy; 0x21uy; 0x2fuy; 0x2cuy; 0x02uy; 0xa4uy; 0xe0uy; 0x35uy; 0xc1uy; 0x7euy; 0x23uy; 0x29uy; 0xacuy; 0xa1uy; 0x2euy; 0x21uy; 0xd5uy; 0x14uy; 0xb2uy; 0x54uy; 0x66uy; 0x93uy; 0x1cuy; 0x7duy; 0x8fuy; 0x6auy; 0x5auy; 0xacuy; 0x84uy; 0xaauy; 0x05uy; 0x1buy; 0xa3uy; 0x0buy; 0x39uy; 0x6auy; 0x0auy; 0xacuy; 0x97uy; 0x3duy; 0x58uy; 0xe0uy; 0x91uy; 0x47uy; 0x3fuy; 0x59uy; 0x85uy; ] in
assert_norm (List.Tot.length l = 64);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output2_len: (x:UInt32.t { UInt32.v x = B.length output2 }) =
64ul
let key3: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xffuy; 0xe9uy; 0x92uy; 0x86uy; 0x65uy; 0x73uy; 0x1cuy; 0x6duy; 0x6auy; 0x8fuy; 0x94uy; 0x67uy; 0x30uy; 0x83uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key3_len: (x:UInt32.t { UInt32.v x = B.length key3 }) =
16ul
let nonce3: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xcauy; 0xfeuy; 0xbauy; 0xbeuy; 0xfauy; 0xceuy; 0xdbuy; 0xaduy; 0xdeuy; 0xcauy; 0xf8uy; 0x88uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce3_len: (x:UInt32.t { UInt32.v x = B.length nonce3 }) =
12ul
let aad3: (b: B.buffer UInt8.t { B.length b = 20 /\ B.recallable b }) =
[@inline_let] let l = [ 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xfeuy; 0xeduy; 0xfauy; 0xceuy; 0xdeuy; 0xaduy; 0xbeuy; 0xefuy; 0xabuy; 0xaduy; 0xdauy; 0xd2uy; ] in
assert_norm (List.Tot.length l = 20);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad3_len: (x:UInt32.t { UInt32.v x = B.length aad3 }) =
20ul | false | false | Test.Vectors.Aes128Gcm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val input3:(b: B.buffer UInt8.t {B.length b = 60 /\ B.recallable b /\ B.disjoint b aad3}) | [] | Test.Vectors.Aes128Gcm.input3 | {
"file_name": "providers/test/vectors/Test.Vectors.Aes128Gcm.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
LowStar.Buffer.buffer FStar.UInt8.t
{ LowStar.Monotonic.Buffer.length b = 60 /\ LowStar.Monotonic.Buffer.recallable b /\
LowStar.Monotonic.Buffer.disjoint b Test.Vectors.Aes128Gcm.aad3 } | {
"end_col": 38,
"end_line": 178,
"start_col": 5,
"start_line": 176
} |
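The record above covers input3, the 60-byte plaintext that is used together with the 20-byte aad3 and the 16-byte tag3 shown earlier in the same file; these values line up with test case 4 of the same GCM specification. A hedged Python sketch of the corresponding check, under the same assumptions as the previous sketch; the only functional difference is that the associated data is now passed as the third argument to encrypt.

```python
# Check that key3/nonce3/aad3/input3 produce tag3 as quoted in the records above.
# Assumes the third-party `cryptography` package is installed.
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key   = bytes.fromhex("feffe9928665731c6d6a8f9467308308")    # key3
nonce = bytes.fromhex("cafebabefacedbaddecaf888")            # nonce3
aad   = bytes.fromhex("feedfacedeadbeef" * 2 + "abaddad2")   # aad3 (20 bytes)
plaintext = bytes.fromhex(                                   # input3 (60 bytes)
    "d9313225f88406e5a55909c5aff5269a"
    "86a7a9531534f7da2e4c303d8a318a72"
    "1c3c0c95956809532fcf0e2449a6b525"
    "b16aedf5aa0de657ba637b39")

ct_and_tag = AESGCM(key).encrypt(nonce, plaintext, aad)
assert ct_and_tag[-16:].hex() == "5bc94fbc3221a5db94fae95ae7121a47"  # tag3
```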
Prims.Tot | val va_ens_Fast_add1
(va_b0: va_code)
(va_s0: va_state)
(dst_b inA_b: buffer64)
(inB: nat64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))) | val va_ens_Fast_add1
(va_b0: va_code)
(va_s0: va_state)
(dst_b inA_b: buffer64)
(inB: nat64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop
let va_ens_Fast_add1
(va_b0: va_code)
(va_s0: va_state)
(dst_b inA_b: buffer64)
(inB: nat64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop = | false | null | false | (va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\
(let a0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let a1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let a2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let a3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let a:Prims.nat = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in
let d1 = Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in
let d2 = Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in
let d3 = Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in
let d = Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in
d == a + va_get_reg64 rRdx va_s0 /\
Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem va_sM)) /\
va_state_eq va_sM
(va_update_flags va_sM
(va_update_mem_layout va_sM
(va_update_mem_heaplet 0
va_sM
(va_update_reg64 rR11
va_sM
(va_update_reg64 rR10
va_sM
(va_update_reg64 rR9
va_sM
(va_update_reg64 rR8
va_sM
(va_update_reg64 rRdx
va_sM
(va_update_reg64 rRax
va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Vale.X64.Memory.buffer64",
"Vale.X64.Memory.nat64",
"Vale.X64.Decls.va_fuel",
"Prims.l_and",
"Vale.Curve25519.X64.FastUtil.va_req_Fast_add1",
"Vale.X64.Decls.va_ensure_total",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Prims.int",
"Prims.op_Addition",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Decls.modifies_buffer",
"Vale.X64.Decls.va_get_mem",
"Prims.nat",
"Vale.Curve25519.Fast_defs.pow2_five",
"Vale.X64.Machine_s.rRax",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.buffer64_read",
"Vale.Curve25519.Fast_defs.pow2_four",
"Vale.X64.Decls.va_state_eq",
"Vale.X64.Decls.va_update_flags",
"Vale.X64.Decls.va_update_mem_layout",
"Vale.X64.Decls.va_update_mem_heaplet",
"Vale.X64.Decls.va_update_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Decls.va_update_ok",
"Vale.X64.Decls.va_update_mem",
"Prims.prop"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) | false | true | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_ens_Fast_add1
(va_b0: va_code)
(va_s0: va_state)
(dst_b inA_b: buffer64)
(inB: nat64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [] | Vale.Curve25519.X64.FastUtil.va_ens_Fast_add1 | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
dst_b: Vale.X64.Memory.buffer64 ->
inA_b: Vale.X64.Memory.buffer64 ->
inB: Vale.X64.Memory.nat64 ->
va_sM: Vale.X64.Decls.va_state ->
va_fM: Vale.X64.Decls.va_fuel
-> Prims.prop | {
"end_col": 90,
"end_line": 53,
"start_col": 2,
"start_line": 38
} |
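The postcondition in the va_ens_Fast_add1 record above states, in radix-2^64 terms, that the four destination limbs plus the carry left in rax encode the sum of the four input limbs and the scalar in rdx: pow2_five d0 d1 d2 d3 rax == pow2_four a0 a1 a2 a3 + rdx. A small Python model of that arithmetic contract follows; it mirrors the pow2_four / pow2_five definitions by name but is only an illustration, not the Vale procedure itself.

```python
# Plain-integer model of the contract stated by va_ens_Fast_add1.
import random

MASK64 = (1 << 64) - 1

def pow2_four(c0, c1, c2, c3):
    return c0 + (c1 << 64) + (c2 << 128) + (c3 << 192)

def pow2_five(c0, c1, c2, c3, c4):
    return pow2_four(c0, c1, c2, c3) + (c4 << 256)

def fast_add1_model(a_limbs, rdx):
    """Add the 64-bit scalar rdx to a 4-limb number; return 4 limbs plus carry."""
    total = pow2_four(*a_limbs) + rdx
    d = [(total >> (64 * i)) & MASK64 for i in range(4)]
    rax = total >> 256          # the carry the contract says ends up in rax
    return d, rax

for _ in range(1000):
    a = [random.getrandbits(64) for _ in range(4)]
    rdx = random.getrandbits(64)
    d, rax = fast_add1_model(a, rdx)
    assert pow2_five(*d, rax) == pow2_four(*a) + rdx
```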
Prims.Tot | val va_req_Fast_add1 (va_b0: va_code) (va_s0: va_state) (dst_b inA_b: buffer64) (inB: nat64) : prop | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)) | val va_req_Fast_add1 (va_b0: va_code) (va_s0: va_state) (dst_b inA_b: buffer64) (inB: nat64) : prop
let va_req_Fast_add1 (va_b0: va_code) (va_s0: va_state) (dst_b inA_b: buffer64) (inB: nat64) : prop = | false | null | false | (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\
(let a0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let a1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let a2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let a3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let a:Prims.nat = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdi va_s0)
dst_b
4
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0)
inA_b
4
(va_get_mem_layout va_s0)
Secret /\ inB == va_get_reg64 rRdx va_s0)) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Vale.X64.Memory.buffer64",
"Vale.X64.Memory.nat64",
"Prims.l_and",
"Vale.X64.Decls.va_require_total",
"Vale.Curve25519.X64.FastUtil.va_code_Fast_add1",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.adx_enabled",
"Vale.X64.CPU_Features_s.bmi2_enabled",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Prims.l_or",
"Vale.X64.Decls.buffers_disjoint",
"Prims.eq2",
"Vale.X64.Decls.validDstAddrs64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRdi",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validSrcAddrs64",
"Vale.X64.Machine_s.rRsi",
"Vale.Def.Words_s.nat64",
"Vale.X64.Machine_s.rRdx",
"Prims.nat",
"Vale.Curve25519.Fast_defs.pow2_four",
"Vale.X64.Decls.buffer64_read",
"Prims.prop"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) | false | true | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_req_Fast_add1 (va_b0: va_code) (va_s0: va_state) (dst_b inA_b: buffer64) (inB: nat64) : prop | [] | Vale.Curve25519.X64.FastUtil.va_req_Fast_add1 | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
dst_b: Vale.X64.Memory.buffer64 ->
inA_b: Vale.X64.Memory.buffer64 ->
inB: Vale.X64.Memory.nat64
-> Prims.prop | {
"end_col": 11,
"end_line": 35,
"start_col": 2,
"start_line": 24
} |
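Besides buffer validity and the disjoint-or-equal aliasing condition, the va_req_Fast_add1 precondition above requires adx_enabled /\ bmi2_enabled, presumably because the surrounding fast-multiplication code relies on the ADX/BMI2 instruction extensions (MULX, ADCX, ADOX). A hedged, Linux/x86-specific Python sketch for checking whether a host CPU advertises those extensions is given below; it is an illustrative addition and is not how the verified code itself detects CPU features.

```python
# Report whether /proc/cpuinfo lists the "adx" and "bmi2" CPU flags.
# Linux/x86-specific illustration; not part of the dataset or HACL*.
def cpu_has(*features):
    with open("/proc/cpuinfo") as f:
        for line in f:
            if line.startswith("flags"):
                flags = set(line.split(":", 1)[1].split())
                return all(feature in flags for feature in features)
    return False

print("adx and bmi2 available:", cpu_has("adx", "bmi2"))
```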
Prims.Tot | val va_wp_Fast_add1
(dst_b inA_b: buffer64)
(inB: nat64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state
-> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0)
(va_get_mem va_s0) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rdx:nat64) (va_x_r8:nat64)
(va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags
va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))) in
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) ==> va_k va_sM (()))) | val va_wp_Fast_add1
(dst_b inA_b: buffer64)
(inB: nat64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Fast_add1
(dst_b inA_b: buffer64)
(inB: nat64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(let a0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let a1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let a2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let a3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let a:Prims.nat = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdi va_s0)
dst_b
4
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0)
inA_b
4
(va_get_mem_layout va_s0)
Secret /\ inB == va_get_reg64 rRdx va_s0) /\
(forall (va_x_mem: vale_heap) (va_x_rax: nat64) (va_x_rdx: nat64) (va_x_r8: nat64)
(va_x_r9: nat64) (va_x_r10: nat64) (va_x_r11: nat64) (va_x_heap0: vale_heap)
(va_x_memLayout: vale_heap_layout) (va_x_efl: Vale.X64.Flags.t).
let va_sM =
va_upd_flags va_x_efl
(va_upd_mem_layout va_x_memLayout
(va_upd_mem_heaplet 0
va_x_heap0
(va_upd_reg64 rR11
va_x_r11
(va_upd_reg64 rR10
va_x_r10
(va_upd_reg64 rR9
va_x_r9
(va_upd_reg64 rR8
va_x_r8
(va_upd_reg64 rRdx
va_x_rdx
(va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))
)
in
va_get_ok va_sM /\
(let a0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let a1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let a2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let a3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let a:Prims.nat = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in
let d1 = Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in
let d2 = Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in
let d3 = Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in
let d = Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in
d == a + va_get_reg64 rRdx va_s0 /\
Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem va_sM)) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Vale.X64.Memory.buffer64",
"Vale.X64.Memory.nat64",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.adx_enabled",
"Vale.X64.CPU_Features_s.bmi2_enabled",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Prims.l_or",
"Vale.X64.Decls.buffers_disjoint",
"Prims.eq2",
"Vale.X64.Decls.validDstAddrs64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRdi",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validSrcAddrs64",
"Vale.X64.Machine_s.rRsi",
"Vale.Def.Words_s.nat64",
"Vale.X64.Machine_s.rRdx",
"Prims.nat",
"Vale.Curve25519.Fast_defs.pow2_four",
"Vale.X64.Decls.buffer64_read",
"Prims.l_Forall",
"Vale.X64.InsBasic.vale_heap",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Vale.X64.Flags.t",
"Prims.l_imp",
"Prims.int",
"Prims.op_Addition",
"Vale.X64.Decls.modifies_buffer",
"Vale.Curve25519.Fast_defs.pow2_five",
"Vale.X64.Machine_s.rRax",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_mem_layout",
"Vale.X64.Decls.va_upd_mem_heaplet",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Decls.va_upd_mem"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))
val va_lemma_Fast_add1 : va_b0:va_code -> va_s0:va_state -> dst_b:buffer64 -> inA_b:buffer64 ->
inB:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))
[@ va_qattr]
let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state | false | true | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Fast_add1
(dst_b inA_b: buffer64)
(inB: nat64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.Curve25519.X64.FastUtil.va_wp_Fast_add1 | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
dst_b: Vale.X64.Memory.buffer64 ->
inA_b: Vale.X64.Memory.buffer64 ->
inB: Vale.X64.Memory.nat64 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 33,
"end_line": 116,
"start_col": 2,
"start_line": 89
} |
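The record that follows specifies the precondition of Cswap2, which takes a selection bit (bit_in <= 1) and two 8-limb buffers p0_b and p1_b. As the name suggests, the procedure conditionally swaps the two buffers; the Python sketch below models the usual branch-free mask technique for such a swap. It illustrates the idea only and is not the Vale implementation; the function name cswap2_model is introduced here for the example.

```python
# Plain-integer model of an 8-limb constant-time conditional swap.
MASK64 = (1 << 64) - 1

def cswap2_model(bit, p0, p1):
    assert bit in (0, 1) and len(p0) == len(p1) == 8
    mask = (0 - bit) & MASK64          # all-zeros when bit = 0, all-ones when bit = 1
    q0, q1 = [], []
    for x, y in zip(p0, p1):
        t = (x ^ y) & mask             # either 0 or x ^ y
        q0.append(x ^ t)
        q1.append(y ^ t)
    return q0, q1

p0 = list(range(8))
p1 = list(range(100, 108))
assert cswap2_model(0, p0, p1) == (p0, p1)   # bit = 0: buffers unchanged
assert cswap2_model(1, p0, p1) == (p1, p0)   # bit = 1: buffers swapped
```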
Prims.Tot | val va_req_Cswap2 (va_b0: va_code) (va_s0: va_state) (bit_in: nat64) (p0_b p1_b: buffer64) : prop | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_req_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) :
prop =
(va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in == va_get_reg64 rRdi va_s0 /\
va_get_reg64 rRdi va_s0 <= 1 /\ (Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0) p1_b 8 (va_get_mem_layout va_s0) Secret)) | val va_req_Cswap2 (va_b0: va_code) (va_s0: va_state) (bit_in: nat64) (p0_b p1_b: buffer64) : prop
let va_req_Cswap2 (va_b0: va_code) (va_s0: va_state) (bit_in: nat64) (p0_b p1_b: buffer64) : prop = | false | null | false | (va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\
(let old_p0_0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let old_p0_1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in
let old_p0_2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0) in
let old_p0_3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in
let old_p0_4:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in
let old_p0_5:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let old_p0_6:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in
let old_p0_7:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let old_p1_0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let old_p1_1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in
let old_p1_2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0) in
let old_p1_3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in
let old_p1_4:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in
let old_p1_5:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let old_p1_6:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in
let old_p1_7:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
bit_in == va_get_reg64 rRdi va_s0 /\ va_get_reg64 rRdi va_s0 <= 1 /\
(Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0)
p0_b
8
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0)
p1_b
8
(va_get_mem_layout va_s0)
Secret)) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Vale.X64.Memory.nat64",
"Vale.X64.Memory.buffer64",
"Prims.l_and",
"Vale.X64.Decls.va_require_total",
"Vale.Curve25519.X64.FastUtil.va_code_Cswap2",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRdi",
"Prims.op_LessThanOrEqual",
"Prims.l_or",
"Vale.X64.Decls.buffers_disjoint",
"Vale.X64.Decls.validDstAddrs64",
"Vale.X64.Machine_s.rRsi",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Decls.buffer64_read",
"Prims.prop"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))
val va_lemma_Fast_add1 : va_b0:va_code -> va_s0:va_state -> dst_b:buffer64 -> inA_b:buffer64 ->
inB:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))
[@ va_qattr]
let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state
-> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0)
(va_get_mem va_s0) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rdx:nat64) (va_x_r8:nat64)
(va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags
va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))) in
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Fast_add1 : dst_b:buffer64 -> inA_b:buffer64 -> inB:nat64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1 dst_b inA_b inB va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1 ()) ([va_Mod_flags;
va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) : (va_quickCode unit
(va_code_Fast_add1 ())) =
(va_QProc (va_code_Fast_add1 ()) ([va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 0;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx;
va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1 dst_b inA_b inB) (va_wpProof_Fast_add1 dst_b
inA_b inB))
//--
//-- Fast_add1_stdcall
val va_code_Fast_add1_stdcall : win:bool -> Tot va_code
val va_codegen_success_Fast_add1_stdcall : win:bool -> Tot va_pbool
let va_req_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) : prop =
(va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret))
let va_ens_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1_stdcall va_b0 va_s0 win dst_b inA_b inB_in /\ va_ensure_total va_b0 va_s0 va_sM
va_fM /\ va_get_ok va_sM /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in
let a0 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))
val va_lemma_Fast_add1_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> dst_b:buffer64 ->
inA_b:buffer64 -> inB_in:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let a0 = Vale.X64.Decls.buffer64_read
inA_b 0 (va_get_mem va_s0) in let a1 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0)
in let a2 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64)
(va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\
bmi2_enabled) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout
va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rbx:nat64)
(va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rsi:nat64) (va_x_rdi:nat64) (va_x_rbp:nat64)
(va_x_rsp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64)
(va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags
va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13
(va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64
rR8 va_x_r8 (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRdi va_x_rdi
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))) in va_get_ok va_sM /\ (let (dst_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64
rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let a0 =
Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) ==> va_k va_sM (())))
val va_wpProof_Fast_add1_stdcall : win:bool -> dst_b:buffer64 -> inA_b:buffer64 -> inB_in:nat64 ->
va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1_stdcall win dst_b inA_b inB_in va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1_stdcall win)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR11; va_Mod_reg64 rR10;
va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp; va_Mod_reg64 rRbp; va_Mod_reg64 rRdi;
va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax;
va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64) :
(va_quickCode unit (va_code_Fast_add1_stdcall win)) =
(va_QProc (va_code_Fast_add1_stdcall win) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp;
va_Mod_reg64 rRbp; va_Mod_reg64 rRdi; va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1_stdcall win dst_b inA_b
inB_in) (va_wpProof_Fast_add1_stdcall win dst_b inA_b inB_in))
//--
//-- Cswap2
val va_code_Cswap2 : va_dummy:unit -> Tot va_code
val va_codegen_success_Cswap2 : va_dummy:unit -> Tot va_pbool
let va_req_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) : | false | true | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_req_Cswap2 (va_b0: va_code) (va_s0: va_state) (bit_in: nat64) (p0_b p1_b: buffer64) : prop | [] | Vale.Curve25519.X64.FastUtil.va_req_Cswap2 | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
bit_in: Vale.X64.Memory.nat64 ->
p0_b: Vale.X64.Memory.buffer64 ->
p1_b: Vale.X64.Memory.buffer64
-> Prims.prop | {
"end_col": 71,
"end_line": 332,
"start_col": 2,
"start_line": 309
} |
Prims.Tot | val va_ens_Cswap2
(va_b0: va_code)
(va_s0: va_state)
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_ens_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Cswap2 va_b0 va_s0 bit_in p0_b p1_b /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (va_get_reg64 rRdi va_s0
= 1) then old_p1_0 else old_p0_0) /\ p0_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_1
else old_p0_1) /\ p0_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_3 else old_p0_3) /\ p0_4 == (if
(va_get_reg64 rRdi va_s0 = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (va_get_reg64 rRdi
va_s0 = 1) then old_p1_5 else old_p0_5) /\ p0_6 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p1_6 else old_p0_6) /\ p0_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_7 else
old_p0_7) /\ p1_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_0 else old_p1_0) /\ p1_1 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_2 else old_p1_2) /\ p1_3 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p0_3 else old_p1_3) /\ p1_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_4 else
old_p1_4) /\ p1_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_5 else old_p1_5) /\ p1_6 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_7 else old_p1_7))) /\ va_state_eq va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdi va_sM (va_update_ok
va_sM (va_update_mem va_sM va_s0)))))))))) | val va_ens_Cswap2
(va_b0: va_code)
(va_s0: va_state)
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop
let va_ens_Cswap2
(va_b0: va_code)
(va_s0: va_state)
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop = | false | null | false | (va_req_Cswap2 va_b0 va_s0 bit_in p0_b p1_b /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\
(let old_p0_0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let old_p0_1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in
let old_p0_2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0) in
let old_p0_3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in
let old_p0_4:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in
let old_p0_5:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let old_p0_6:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in
let old_p0_7:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let old_p1_0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let old_p1_1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in
let old_p1_2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0) in
let old_p1_3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in
let old_p1_4:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in
let old_p1_5:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let old_p1_6:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in
let old_p1_7:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in
let p0_1 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in
let p0_2 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in
let p0_3 = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in
let p0_4 = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in
let p0_5 = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in
let p0_6 = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in
let p0_7 = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in
let p1_0 = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in
let p1_1 = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in
let p1_2 = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in
let p1_3 = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in
let p1_4 = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in
let p1_5 = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in
let p1_6 = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in
let p1_7 = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in
p0_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_0 else old_p0_0) /\
p0_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_1 else old_p0_1) /\
p0_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_3 else old_p0_3) /\
p0_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_4 else old_p0_4) /\
p0_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_5 else old_p0_5) /\
p0_6 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_6 else old_p0_6) /\
p0_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_7 else old_p0_7) /\
p1_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_0 else old_p1_0) /\
p1_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_1 else old_p1_1) /\
p1_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_2 else old_p1_2) /\
p1_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_3 else old_p1_3) /\
p1_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_4 else old_p1_4) /\
p1_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_5 else old_p1_5) /\
p1_6 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_6 else old_p1_6) /\
p1_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_7 else old_p1_7))) /\
va_state_eq va_sM
(va_update_mem_layout va_sM
(va_update_mem_heaplet 0
va_sM
(va_update_flags va_sM
(va_update_reg64 rR10
va_sM
(va_update_reg64 rR9
va_sM
(va_update_reg64 rR8
va_sM
(va_update_reg64 rRdi
va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Vale.X64.Memory.nat64",
"Vale.X64.Memory.buffer64",
"Vale.X64.Decls.va_fuel",
"Prims.l_and",
"Vale.Curve25519.X64.FastUtil.va_req_Cswap2",
"Vale.X64.Decls.va_ensure_total",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.Decls.modifies_buffer_2",
"Vale.X64.Decls.va_get_mem",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Prims.op_Equality",
"Prims.int",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRdi",
"Prims.bool",
"Vale.X64.Decls.buffer64_read",
"Vale.X64.Decls.va_state_eq",
"Vale.X64.Decls.va_update_mem_layout",
"Vale.X64.Decls.va_update_mem_heaplet",
"Vale.X64.Decls.va_update_flags",
"Vale.X64.Decls.va_update_reg64",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Decls.va_update_ok",
"Vale.X64.Decls.va_update_mem",
"Prims.prop"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))
val va_lemma_Fast_add1 : va_b0:va_code -> va_s0:va_state -> dst_b:buffer64 -> inA_b:buffer64 ->
inB:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))
[@ va_qattr]
let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state
-> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0)
(va_get_mem va_s0) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rdx:nat64) (va_x_r8:nat64)
(va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags
va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))) in
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Fast_add1 : dst_b:buffer64 -> inA_b:buffer64 -> inB:nat64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1 dst_b inA_b inB va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1 ()) ([va_Mod_flags;
va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) : (va_quickCode unit
(va_code_Fast_add1 ())) =
(va_QProc (va_code_Fast_add1 ()) ([va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 0;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx;
va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1 dst_b inA_b inB) (va_wpProof_Fast_add1 dst_b
inA_b inB))
//--
//-- Fast_add1_stdcall
val va_code_Fast_add1_stdcall : win:bool -> Tot va_code
val va_codegen_success_Fast_add1_stdcall : win:bool -> Tot va_pbool
let va_req_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) : prop =
(va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret))
let va_ens_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1_stdcall va_b0 va_s0 win dst_b inA_b inB_in /\ va_ensure_total va_b0 va_s0 va_sM
va_fM /\ va_get_ok va_sM /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in
let a0 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))
val va_lemma_Fast_add1_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> dst_b:buffer64 ->
inA_b:buffer64 -> inB_in:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let a0 = Vale.X64.Decls.buffer64_read
inA_b 0 (va_get_mem va_s0) in let a1 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0)
in let a2 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64)
(va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\
bmi2_enabled) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout
va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rbx:nat64)
(va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rsi:nat64) (va_x_rdi:nat64) (va_x_rbp:nat64)
(va_x_rsp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64)
(va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags
va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13
(va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64
rR8 va_x_r8 (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRdi va_x_rdi
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))) in va_get_ok va_sM /\ (let (dst_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64
rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let a0 =
Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) ==> va_k va_sM (())))
val va_wpProof_Fast_add1_stdcall : win:bool -> dst_b:buffer64 -> inA_b:buffer64 -> inB_in:nat64 ->
va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1_stdcall win dst_b inA_b inB_in va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1_stdcall win)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR11; va_Mod_reg64 rR10;
va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp; va_Mod_reg64 rRbp; va_Mod_reg64 rRdi;
va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax;
va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64) :
(va_quickCode unit (va_code_Fast_add1_stdcall win)) =
(va_QProc (va_code_Fast_add1_stdcall win) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp;
va_Mod_reg64 rRbp; va_Mod_reg64 rRdi; va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1_stdcall win dst_b inA_b
inB_in) (va_wpProof_Fast_add1_stdcall win dst_b inA_b inB_in))
//--
//-- Cswap2
val va_code_Cswap2 : va_dummy:unit -> Tot va_code
val va_codegen_success_Cswap2 : va_dummy:unit -> Tot va_pbool
let va_req_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) :
prop =
(va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in == va_get_reg64 rRdi va_s0 /\
va_get_reg64 rRdi va_s0 <= 1 /\ (Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0) p1_b 8 (va_get_mem_layout va_s0) Secret))
let va_ens_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) | false | true | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_ens_Cswap2
(va_b0: va_code)
(va_s0: va_state)
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [] | Vale.Curve25519.X64.FastUtil.va_ens_Cswap2 | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
bit_in: Vale.X64.Memory.nat64 ->
p0_b: Vale.X64.Memory.buffer64 ->
p1_b: Vale.X64.Memory.buffer64 ->
va_sM: Vale.X64.Decls.va_state ->
va_fM: Vale.X64.Decls.va_fuel
-> Prims.prop | {
"end_col": 46,
"end_line": 386,
"start_col": 2,
"start_line": 335
} |
Prims.Tot | val va_wp_Cswap2
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Cswap2 (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) (va_s0:va_state) (va_k:(va_state ->
unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
bit_in == va_get_reg64 rRdi va_s0 /\ va_get_reg64 rRdi va_s0 <= 1 /\
(Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdx va_s0) p1_b 8
(va_get_mem_layout va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rdi:nat64)
(va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags va_x_efl (va_upd_reg64 rR10
va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRdi va_x_rdi
(va_upd_mem va_x_mem va_s0))))))) in va_get_ok va_sM /\ (let (old_p0_0:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in let
(old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in
let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0)
in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem
va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 4
(va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in let
(old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in
let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0)
in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem
va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 4
(va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Decls.modifies_buffer_2 p0_b
p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_sM) in let p0_1 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let
p0_2 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == va_if (va_get_reg64 rRdi
va_s0 = 1) (fun _ -> old_p1_0) (fun _ -> old_p0_0) /\ p0_1 == va_if (va_get_reg64 rRdi va_s0 =
1) (fun _ -> old_p1_1) (fun _ -> old_p0_1) /\ p0_2 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun
_ -> old_p1_2) (fun _ -> old_p0_2) /\ p0_3 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_3) (fun _ -> old_p0_3) /\ p0_4 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_4) (fun _ -> old_p0_4) /\ p0_5 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_5) (fun _ -> old_p0_5) /\ p0_6 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_6) (fun _ -> old_p0_6) /\ p0_7 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_7) (fun _ -> old_p0_7) /\ p1_0 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_0) (fun _ -> old_p1_0) /\ p1_1 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_1) (fun _ -> old_p1_1) /\ p1_2 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_2) (fun _ -> old_p1_2) /\ p1_3 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_3) (fun _ -> old_p1_3) /\ p1_4 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_4) (fun _ -> old_p1_4) /\ p1_5 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_5) (fun _ -> old_p1_5) /\ p1_6 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_6) (fun _ -> old_p1_6) /\ p1_7 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_7) (fun _ -> old_p1_7))) ==> va_k va_sM (()))) | val va_wp_Cswap2
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Cswap2
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(let old_p0_0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let old_p0_1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in
let old_p0_2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0) in
let old_p0_3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in
let old_p0_4:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in
let old_p0_5:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let old_p0_6:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in
let old_p0_7:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let old_p1_0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let old_p1_1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in
let old_p1_2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0) in
let old_p1_3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in
let old_p1_4:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in
let old_p1_5:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let old_p1_6:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in
let old_p1_7:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
bit_in == va_get_reg64 rRdi va_s0 /\ va_get_reg64 rRdi va_s0 <= 1 /\
(Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0)
p0_b
8
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0)
p1_b
8
(va_get_mem_layout va_s0)
Secret) /\
(forall (va_x_mem: vale_heap)
(va_x_rdi: nat64)
(va_x_r8: nat64)
(va_x_r9: nat64)
(va_x_r10: nat64)
(va_x_efl: Vale.X64.Flags.t)
(va_x_heap0: vale_heap)
(va_x_memLayout: vale_heap_layout).
let va_sM =
va_upd_mem_layout va_x_memLayout
(va_upd_mem_heaplet 0
va_x_heap0
(va_upd_flags va_x_efl
(va_upd_reg64 rR10
va_x_r10
(va_upd_reg64 rR9
va_x_r9
(va_upd_reg64 rR8
va_x_r8
(va_upd_reg64 rRdi va_x_rdi (va_upd_mem va_x_mem va_s0)))))))
in
va_get_ok va_sM /\
(let old_p0_0:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0)
in
let old_p0_1:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in
let old_p0_2:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0)
in
let old_p0_3:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0)
in
let old_p0_4:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0)
in
let old_p0_5:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0)
in
let old_p0_6:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in
let old_p0_7:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0)
in
let old_p1_0:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0)
in
let old_p1_1:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in
let old_p1_2:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0)
in
let old_p1_3:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0)
in
let old_p1_4:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0)
in
let old_p1_5:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0)
in
let old_p1_6:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in
let old_p1_7:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0)
in
Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in
let p0_1 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in
let p0_2 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in
let p0_3 = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in
let p0_4 = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in
let p0_5 = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in
let p0_6 = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in
let p0_7 = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in
let p1_0 = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in
let p1_1 = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in
let p1_2 = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in
let p1_3 = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in
let p1_4 = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in
let p1_5 = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in
let p1_6 = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in
let p1_7 = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in
p0_0 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p1_0) (fun _ -> old_p0_0) /\
p0_1 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p1_1) (fun _ -> old_p0_1) /\
p0_2 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p1_2) (fun _ -> old_p0_2) /\
p0_3 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p1_3) (fun _ -> old_p0_3) /\
p0_4 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p1_4) (fun _ -> old_p0_4) /\
p0_5 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p1_5) (fun _ -> old_p0_5) /\
p0_6 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p1_6) (fun _ -> old_p0_6) /\
p0_7 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p1_7) (fun _ -> old_p0_7) /\
p1_0 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p0_0) (fun _ -> old_p1_0) /\
p1_1 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p0_1) (fun _ -> old_p1_1) /\
p1_2 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p0_2) (fun _ -> old_p1_2) /\
p1_3 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p0_3) (fun _ -> old_p1_3) /\
p1_4 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p0_4) (fun _ -> old_p1_4) /\
p1_5 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p0_5) (fun _ -> old_p1_5) /\
p1_6 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p0_6) (fun _ -> old_p1_6) /\
p1_7 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ -> old_p0_7) (fun _ -> old_p1_7))) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Vale.X64.Memory.nat64",
"Vale.X64.Memory.buffer64",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRdi",
"Prims.op_LessThanOrEqual",
"Prims.l_or",
"Vale.X64.Decls.buffers_disjoint",
"Vale.X64.Decls.validDstAddrs64",
"Vale.X64.Machine_s.rRsi",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Decls.buffer64_read",
"Prims.l_Forall",
"Vale.X64.InsBasic.vale_heap",
"Vale.X64.Flags.t",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Prims.l_imp",
"Vale.X64.Decls.modifies_buffer_2",
"Vale.X64.Decls.va_if",
"Prims.op_Equality",
"Prims.int",
"Prims.l_not",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_mem_layout",
"Vale.X64.Decls.va_upd_mem_heaplet",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Decls.va_upd_mem"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))
val va_lemma_Fast_add1 : va_b0:va_code -> va_s0:va_state -> dst_b:buffer64 -> inA_b:buffer64 ->
inB:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))
[@ va_qattr]
let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state
-> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0)
(va_get_mem va_s0) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rdx:nat64) (va_x_r8:nat64)
(va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags
va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))) in
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Fast_add1 : dst_b:buffer64 -> inA_b:buffer64 -> inB:nat64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1 dst_b inA_b inB va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1 ()) ([va_Mod_flags;
va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) : (va_quickCode unit
(va_code_Fast_add1 ())) =
(va_QProc (va_code_Fast_add1 ()) ([va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 0;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx;
va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1 dst_b inA_b inB) (va_wpProof_Fast_add1 dst_b
inA_b inB))
//--
//-- Fast_add1_stdcall
val va_code_Fast_add1_stdcall : win:bool -> Tot va_code
val va_codegen_success_Fast_add1_stdcall : win:bool -> Tot va_pbool
let va_req_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) : prop =
(va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret))
let va_ens_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1_stdcall va_b0 va_s0 win dst_b inA_b inB_in /\ va_ensure_total va_b0 va_s0 va_sM
va_fM /\ va_get_ok va_sM /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in
let a0 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))
val va_lemma_Fast_add1_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> dst_b:buffer64 ->
inA_b:buffer64 -> inB_in:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let a0 = Vale.X64.Decls.buffer64_read
inA_b 0 (va_get_mem va_s0) in let a1 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0)
in let a2 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64)
(va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\
bmi2_enabled) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout
va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rbx:nat64)
(va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rsi:nat64) (va_x_rdi:nat64) (va_x_rbp:nat64)
(va_x_rsp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64)
(va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags
va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13
(va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64
rR8 va_x_r8 (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRdi va_x_rdi
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))) in va_get_ok va_sM /\ (let (dst_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64
rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let a0 =
Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) ==> va_k va_sM (())))
val va_wpProof_Fast_add1_stdcall : win:bool -> dst_b:buffer64 -> inA_b:buffer64 -> inB_in:nat64 ->
va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1_stdcall win dst_b inA_b inB_in va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1_stdcall win)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR11; va_Mod_reg64 rR10;
va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp; va_Mod_reg64 rRbp; va_Mod_reg64 rRdi;
va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax;
va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64) :
(va_quickCode unit (va_code_Fast_add1_stdcall win)) =
(va_QProc (va_code_Fast_add1_stdcall win) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp;
va_Mod_reg64 rRbp; va_Mod_reg64 rRdi; va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1_stdcall win dst_b inA_b
inB_in) (va_wpProof_Fast_add1_stdcall win dst_b inA_b inB_in))
//--
//-- Cswap2
val va_code_Cswap2 : va_dummy:unit -> Tot va_code
val va_codegen_success_Cswap2 : va_dummy:unit -> Tot va_pbool
let va_req_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) :
prop =
(va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in == va_get_reg64 rRdi va_s0 /\
va_get_reg64 rRdi va_s0 <= 1 /\ (Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0) p1_b 8 (va_get_mem_layout va_s0) Secret))
let va_ens_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Cswap2 va_b0 va_s0 bit_in p0_b p1_b /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (va_get_reg64 rRdi va_s0
= 1) then old_p1_0 else old_p0_0) /\ p0_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_1
else old_p0_1) /\ p0_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_3 else old_p0_3) /\ p0_4 == (if
(va_get_reg64 rRdi va_s0 = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (va_get_reg64 rRdi
va_s0 = 1) then old_p1_5 else old_p0_5) /\ p0_6 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p1_6 else old_p0_6) /\ p0_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_7 else
old_p0_7) /\ p1_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_0 else old_p1_0) /\ p1_1 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_2 else old_p1_2) /\ p1_3 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p0_3 else old_p1_3) /\ p1_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_4 else
old_p1_4) /\ p1_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_5 else old_p1_5) /\ p1_6 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_7 else old_p1_7))) /\ va_state_eq va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdi va_sM (va_update_ok
va_sM (va_update_mem va_sM va_s0))))))))))
val va_lemma_Cswap2 : va_b0:va_code -> va_s0:va_state -> bit_in:nat64 -> p0_b:buffer64 ->
p1_b:buffer64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in == va_get_reg64 rRdi va_s0 /\
va_get_reg64 rRdi va_s0 <= 1 /\ (Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0) p1_b 8 (va_get_mem_layout va_s0) Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0)
in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem
va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2
(va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let
(old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0)
in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem
va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2
(va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let
(old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (let p0_0 =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (va_get_reg64 rRdi va_s0
= 1) then old_p1_0 else old_p0_0) /\ p0_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_1
else old_p0_1) /\ p0_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_3 else old_p0_3) /\ p0_4 == (if
(va_get_reg64 rRdi va_s0 = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (va_get_reg64 rRdi
va_s0 = 1) then old_p1_5 else old_p0_5) /\ p0_6 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p1_6 else old_p0_6) /\ p0_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_7 else
old_p0_7) /\ p1_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_0 else old_p1_0) /\ p1_1 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_2 else old_p1_2) /\ p1_3 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p0_3 else old_p1_3) /\ p1_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_4 else
old_p1_4) /\ p1_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_5 else old_p1_5) /\ p1_6 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_7 else old_p1_7))) /\ va_state_eq va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdi va_sM (va_update_ok
va_sM (va_update_mem va_sM va_s0)))))))))))
[@ va_qattr]
let va_wp_Cswap2 (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) (va_s0:va_state) (va_k:(va_state -> | false | true | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Cswap2
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.Curve25519.X64.FastUtil.va_wp_Cswap2 | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
bit_in: Vale.X64.Memory.nat64 ->
p0_b: Vale.X64.Memory.buffer64 ->
p1_b: Vale.X64.Memory.buffer64 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 57,
"end_line": 548,
"start_col": 2,
"start_line": 470
} |
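The Cswap2 contract in the record above states, limb by limb, a conditional swap: when rRdi is 1 each limb of p0_b receives the old corresponding limb of p1_b and vice versa, and when rRdi is 0 both buffers keep their old contents. A minimal F* sketch of that behaviour, added here purely as a reading aid (cswap_limb is not part of the Vale development or of this dataset):

(* Illustrative only, not from the dataset: a pure per-limb model of the
   postcondition of va_lemma_Cswap2 / va_wp_Cswap2. *)
let cswap_limb (bit:nat{bit <= 1}) (old_p0 old_p1:nat) : nat & nat =
  if bit = 1 then (old_p1, old_p0) else (old_p0, old_p1)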
Prims.Tot | val va_quick_Fast_add1 (dst_b inA_b: buffer64) (inB: nat64)
: (va_quickCode unit (va_code_Fast_add1 ())) | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) : (va_quickCode unit
(va_code_Fast_add1 ())) =
(va_QProc (va_code_Fast_add1 ()) ([va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 0;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx;
va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1 dst_b inA_b inB) (va_wpProof_Fast_add1 dst_b
inA_b inB)) | val va_quick_Fast_add1 (dst_b inA_b: buffer64) (inB: nat64)
: (va_quickCode unit (va_code_Fast_add1 ()))
let va_quick_Fast_add1 (dst_b inA_b: buffer64) (inB: nat64)
: (va_quickCode unit (va_code_Fast_add1 ())) = | false | null | false | (va_QProc (va_code_Fast_add1 ())
([
va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_reg64 rR11; va_Mod_reg64 rR10;
va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx; va_Mod_reg64 rRax; va_Mod_mem
])
(va_wp_Fast_add1 dst_b inA_b inB)
(va_wpProof_Fast_add1 dst_b inA_b inB)) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Vale.X64.Memory.buffer64",
"Vale.X64.Memory.nat64",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.Curve25519.X64.FastUtil.va_code_Fast_add1",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_mem_layout",
"Vale.X64.QuickCode.va_Mod_mem_heaplet",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRax",
"Vale.X64.QuickCode.va_Mod_mem",
"Prims.Nil",
"Vale.Curve25519.X64.FastUtil.va_wp_Fast_add1",
"Vale.Curve25519.X64.FastUtil.va_wpProof_Fast_add1",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))
val va_lemma_Fast_add1 : va_b0:va_code -> va_s0:va_state -> dst_b:buffer64 -> inA_b:buffer64 ->
inB:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))
[@ va_qattr]
let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state
-> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0)
(va_get_mem va_s0) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rdx:nat64) (va_x_r8:nat64)
(va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags
va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))) in
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Fast_add1 : dst_b:buffer64 -> inA_b:buffer64 -> inB:nat64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1 dst_b inA_b inB va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1 ()) ([va_Mod_flags;
va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) : (va_quickCode unit | false | false | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Fast_add1 (dst_b inA_b: buffer64) (inB: nat64)
: (va_quickCode unit (va_code_Fast_add1 ())) | [] | Vale.Curve25519.X64.FastUtil.va_quick_Fast_add1 | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | dst_b: Vale.X64.Memory.buffer64 -> inA_b: Vale.X64.Memory.buffer64 -> inB: Vale.X64.Memory.nat64
-> Vale.X64.QuickCode.va_quickCode Prims.unit (Vale.Curve25519.X64.FastUtil.va_code_Fast_add1 ()) | {
"end_col": 15,
"end_line": 132,
"start_col": 2,
"start_line": 129
} |
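The Fast_add1 contract in the record above reads the four input limbs as a little-endian radix-2^64 number a and states d == a + inB, with the fifth output limb (rRax) acting as the carry. A hedged sketch of that weighted-sum reading (these helpers are written here for illustration and are only intended to mirror, not replace, Vale.Curve25519.Fast_defs.pow2_four / pow2_five):

(* Illustrative only, not from the dataset: radix-2^64 evaluation of limbs. *)
let pow2_four_sketch (a0 a1 a2 a3:nat) : nat =
  a0 + a1 * pow2 64 + a2 * pow2 128 + a3 * pow2 192
let pow2_five_sketch (d0 d1 d2 d3 d4:nat) : nat =
  d0 + d1 * pow2 64 + d2 * pow2 128 + d3 * pow2 192 + d4 * pow2 256
(* Under this reading, the Fast_add1 postcondition says
   pow2_five_sketch d0 d1 d2 d3 rax == pow2_four_sketch a0 a1 a2 a3 + inB. *)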
Prims.Tot | val va_req_Fast_add1_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(dst_b inA_b: buffer64)
(inB_in: nat64)
: prop | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_req_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) : prop =
(va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret)) | val va_req_Fast_add1_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(dst_b inA_b: buffer64)
(inB_in: nat64)
: prop
let va_req_Fast_add1_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(dst_b inA_b: buffer64)
(inB_in: nat64)
: prop = | false | null | false | (va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let dst_in:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0)
in
let inA_in:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0)
in
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(adx_enabled /\ bmi2_enabled) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
inB_in = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
dst_in
dst_b
4
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
inA_in
inA_b
4
(va_get_mem_layout va_s0)
Secret)) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Prims.bool",
"Vale.X64.Memory.buffer64",
"Vale.X64.Memory.nat64",
"Prims.l_and",
"Vale.X64.Decls.va_require_total",
"Vale.Curve25519.X64.FastUtil.va_code_Fast_add1_stdcall",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Vale.X64.CPU_Features_s.adx_enabled",
"Vale.X64.CPU_Features_s.bmi2_enabled",
"Prims.l_or",
"Vale.X64.Decls.buffers_disjoint",
"Prims.op_Equality",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Decls.validDstAddrs64",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validSrcAddrs64",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRdi",
"Prims.prop"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))
val va_lemma_Fast_add1 : va_b0:va_code -> va_s0:va_state -> dst_b:buffer64 -> inA_b:buffer64 ->
inB:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))
[@ va_qattr]
let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state
-> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0)
(va_get_mem va_s0) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rdx:nat64) (va_x_r8:nat64)
(va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags
va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))) in
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Fast_add1 : dst_b:buffer64 -> inA_b:buffer64 -> inB:nat64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1 dst_b inA_b inB va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1 ()) ([va_Mod_flags;
va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) : (va_quickCode unit
(va_code_Fast_add1 ())) =
(va_QProc (va_code_Fast_add1 ()) ([va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 0;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx;
va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1 dst_b inA_b inB) (va_wpProof_Fast_add1 dst_b
inA_b inB))
//--
//-- Fast_add1_stdcall
val va_code_Fast_add1_stdcall : win:bool -> Tot va_code
val va_codegen_success_Fast_add1_stdcall : win:bool -> Tot va_pbool
let va_req_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64) | false | true | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_req_Fast_add1_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(dst_b inA_b: buffer64)
(inB_in: nat64)
: prop | [] | Vale.Curve25519.X64.FastUtil.va_req_Fast_add1_stdcall | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
win: Prims.bool ->
dst_b: Vale.X64.Memory.buffer64 ->
inA_b: Vale.X64.Memory.buffer64 ->
inB_in: Vale.X64.Memory.nat64
-> Prims.prop | {
"end_col": 12,
"end_line": 151,
"start_col": 2,
"start_line": 141
} |
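The stdcall precondition in the record above fixes the argument registers by calling convention: on Windows x64 the (dst, inA, inB) arguments arrive in rcx, rdx, r8, while on System V they arrive in rdi, rsi, rdx. A tiny illustrative restatement (fast_add1_arg_regs is a name invented here and is not part of the Vale development):

(* Illustrative only, not from the dataset: the register selection made by
   va_req_Fast_add1_stdcall for (dst, inA, inB). *)
let fast_add1_arg_regs (win:bool) : string & string & string =
  if win then ("rcx", "rdx", "r8") else ("rdi", "rsi", "rdx")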
Prims.Tot | val va_ens_Fast_add1_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(dst_b inA_b: buffer64)
(inB_in: nat64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_ens_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1_stdcall va_b0 va_s0 win dst_b inA_b inB_in /\ va_ensure_total va_b0 va_s0 va_sM
va_fM /\ va_get_ok va_sM /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in
let a0 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))))))) | val va_ens_Fast_add1_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(dst_b inA_b: buffer64)
(inB_in: nat64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop
let va_ens_Fast_add1_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(dst_b inA_b: buffer64)
(inB_in: nat64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop = | false | null | false | (va_req_Fast_add1_stdcall va_b0 va_s0 win dst_b inA_b inB_in /\
va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let dst_in:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0)
in
let inA_in:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0)
in
let a0 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let a1 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let a2 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let a3 = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in
let d1 = Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in
let d2 = Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in
let d3 = Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in
let a = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d = Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in
d == a + inB_in /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\
(win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\
(~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
va_state_eq va_sM
(va_update_stackTaint va_sM
(va_update_stack va_sM
(va_update_mem_layout va_sM
(va_update_mem_heaplet 0
va_sM
(va_update_flags va_sM
(va_update_reg64 rR15
va_sM
(va_update_reg64 rR14
va_sM
(va_update_reg64 rR13
va_sM
(va_update_reg64 rR11
va_sM
(va_update_reg64 rR10
va_sM
(va_update_reg64 rR9
va_sM
(va_update_reg64 rR8
va_sM
(va_update_reg64 rRsp
va_sM
(va_update_reg64 rRbp
va_sM
(va_update_reg64 rRdi
va_sM
(va_update_reg64 rRsi
va_sM
(va_update_reg64 rRdx
va_sM
(va_update_reg64 rRcx
va_sM
(va_update_reg64 rRbx
va_sM
(va_update_reg64 rRax
va_sM
(va_update_ok va_sM
(va_update_mem
va_sM
va_s0)))
)))))))))))))))))))) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Prims.bool",
"Vale.X64.Memory.buffer64",
"Vale.X64.Memory.nat64",
"Vale.X64.Decls.va_fuel",
"Prims.l_and",
"Vale.Curve25519.X64.FastUtil.va_req_Fast_add1_stdcall",
"Vale.X64.Decls.va_ensure_total",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Prims.int",
"Prims.op_Addition",
"Vale.X64.Decls.modifies_buffer",
"Vale.X64.Decls.va_get_mem",
"Prims.l_imp",
"Vale.Def.Types_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR15",
"Prims.l_not",
"Prims.nat",
"Vale.Curve25519.Fast_defs.pow2_five",
"Vale.X64.Machine_s.rRax",
"Vale.Curve25519.Fast_defs.pow2_four",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.buffer64_read",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Decls.va_state_eq",
"Vale.X64.Decls.va_update_stackTaint",
"Vale.X64.Decls.va_update_stack",
"Vale.X64.Decls.va_update_mem_layout",
"Vale.X64.Decls.va_update_mem_heaplet",
"Vale.X64.Decls.va_update_flags",
"Vale.X64.Decls.va_update_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Decls.va_update_ok",
"Vale.X64.Decls.va_update_mem",
"Prims.prop"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))
val va_lemma_Fast_add1 : va_b0:va_code -> va_s0:va_state -> dst_b:buffer64 -> inA_b:buffer64 ->
inB:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))
[@ va_qattr]
let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state
-> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0)
(va_get_mem va_s0) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rdx:nat64) (va_x_r8:nat64)
(va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags
va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))) in
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Fast_add1 : dst_b:buffer64 -> inA_b:buffer64 -> inB:nat64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1 dst_b inA_b inB va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1 ()) ([va_Mod_flags;
va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) : (va_quickCode unit
(va_code_Fast_add1 ())) =
(va_QProc (va_code_Fast_add1 ()) ([va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 0;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx;
va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1 dst_b inA_b inB) (va_wpProof_Fast_add1 dst_b
inA_b inB))
//--
//-- Fast_add1_stdcall
val va_code_Fast_add1_stdcall : win:bool -> Tot va_code
val va_codegen_success_Fast_add1_stdcall : win:bool -> Tot va_pbool
let va_req_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) : prop =
(va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret))
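// Note (annotation): the ensures-predicate below extends the core Fast_add1
// postcondition (d == a + inB_in and modifies_buffer dst_b) with preservation of
// the callee-saved registers for the chosen calling convention and the usual
// va_state_eq framing of the final state.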
let va_ens_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64) | false | true | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_ens_Fast_add1_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(dst_b inA_b: buffer64)
(inB_in: nat64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [] | Vale.Curve25519.X64.FastUtil.va_ens_Fast_add1_stdcall | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
win: Prims.bool ->
dst_b: Vale.X64.Memory.buffer64 ->
inA_b: Vale.X64.Memory.buffer64 ->
inB_in: Vale.X64.Memory.nat64 ->
va_sM: Vale.X64.Decls.va_state ->
va_fM: Vale.X64.Decls.va_fuel
-> Prims.prop | {
"end_col": 73,
"end_line": 185,
"start_col": 2,
"start_line": 154
} |
Prims.Tot | val va_quick_Fast_add1_stdcall (win: bool) (dst_b inA_b: buffer64) (inB_in: nat64)
: (va_quickCode unit (va_code_Fast_add1_stdcall win)) | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64) :
(va_quickCode unit (va_code_Fast_add1_stdcall win)) =
(va_QProc (va_code_Fast_add1_stdcall win) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp;
va_Mod_reg64 rRbp; va_Mod_reg64 rRdi; va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1_stdcall win dst_b inA_b
inB_in) (va_wpProof_Fast_add1_stdcall win dst_b inA_b inB_in)) | val va_quick_Fast_add1_stdcall (win: bool) (dst_b inA_b: buffer64) (inB_in: nat64)
: (va_quickCode unit (va_code_Fast_add1_stdcall win))
let va_quick_Fast_add1_stdcall (win: bool) (dst_b inA_b: buffer64) (inB_in: nat64)
: (va_quickCode unit (va_code_Fast_add1_stdcall win)) = | false | null | false | (va_QProc (va_code_Fast_add1_stdcall win)
([
va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR11;
va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp; va_Mod_reg64 rRbp;
va_Mod_reg64 rRdi; va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem
])
(va_wp_Fast_add1_stdcall win dst_b inA_b inB_in)
(va_wpProof_Fast_add1_stdcall win dst_b inA_b inB_in)) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.X64.Memory.buffer64",
"Vale.X64.Memory.nat64",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.Curve25519.X64.FastUtil.va_code_Fast_add1_stdcall",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_stackTaint",
"Vale.X64.QuickCode.va_Mod_stack",
"Vale.X64.QuickCode.va_Mod_mem_layout",
"Vale.X64.QuickCode.va_Mod_mem_heaplet",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRax",
"Vale.X64.QuickCode.va_Mod_mem",
"Prims.Nil",
"Vale.Curve25519.X64.FastUtil.va_wp_Fast_add1_stdcall",
"Vale.Curve25519.X64.FastUtil.va_wpProof_Fast_add1_stdcall",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))
val va_lemma_Fast_add1 : va_b0:va_code -> va_s0:va_state -> dst_b:buffer64 -> inA_b:buffer64 ->
inB:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))
[@ va_qattr]
let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state
-> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0)
(va_get_mem va_s0) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rdx:nat64) (va_x_r8:nat64)
(va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags
va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))) in
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Fast_add1 : dst_b:buffer64 -> inA_b:buffer64 -> inB:nat64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1 dst_b inA_b inB va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1 ()) ([va_Mod_flags;
va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) : (va_quickCode unit
(va_code_Fast_add1 ())) =
(va_QProc (va_code_Fast_add1 ()) ([va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 0;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx;
va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1 dst_b inA_b inB) (va_wpProof_Fast_add1 dst_b
inA_b inB))
//--
//-- Fast_add1_stdcall
val va_code_Fast_add1_stdcall : win:bool -> Tot va_code
val va_codegen_success_Fast_add1_stdcall : win:bool -> Tot va_pbool
let va_req_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) : prop =
(va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret))
let va_ens_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1_stdcall va_b0 va_s0 win dst_b inA_b inB_in /\ va_ensure_total va_b0 va_s0 va_sM
va_fM /\ va_get_ok va_sM /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in
let a0 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))
val va_lemma_Fast_add1_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> dst_b:buffer64 ->
inA_b:buffer64 -> inB_in:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let a0 = Vale.X64.Decls.buffer64_read
inA_b 0 (va_get_mem va_s0) in let a1 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0)
in let a2 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64)
(va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\
bmi2_enabled) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout
va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rbx:nat64)
(va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rsi:nat64) (va_x_rdi:nat64) (va_x_rbp:nat64)
(va_x_rsp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64)
(va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags
va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13
(va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64
rR8 va_x_r8 (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRdi va_x_rdi
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))) in va_get_ok va_sM /\ (let (dst_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64
rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let a0 =
Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) ==> va_k va_sM (())))
val va_wpProof_Fast_add1_stdcall : win:bool -> dst_b:buffer64 -> inA_b:buffer64 -> inB_in:nat64 ->
va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1_stdcall win dst_b inA_b inB_in va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1_stdcall win)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR11; va_Mod_reg64 rR10;
va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp; va_Mod_reg64 rRbp; va_Mod_reg64 rRdi;
va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax;
va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64) : | false | false | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Fast_add1_stdcall (win: bool) (dst_b inA_b: buffer64) (inB_in: nat64)
: (va_quickCode unit (va_code_Fast_add1_stdcall win)) | [] | Vale.Curve25519.X64.FastUtil.va_quick_Fast_add1_stdcall | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
dst_b: Vale.X64.Memory.buffer64 ->
inA_b: Vale.X64.Memory.buffer64 ->
inB_in: Vale.X64.Memory.nat64
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.Curve25519.X64.FastUtil.va_code_Fast_add1_stdcall win) | {
"end_col": 66,
"end_line": 300,
"start_col": 2,
"start_line": 295
} |
Prims.Tot | val va_quick_Cswap2 (bit_in: nat64) (p0_b p1_b: buffer64) : (va_quickCode unit (va_code_Cswap2 ())) | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Cswap2 (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) : (va_quickCode unit
(va_code_Cswap2 ())) =
(va_QProc (va_code_Cswap2 ()) ([va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdi; va_Mod_mem])
(va_wp_Cswap2 bit_in p0_b p1_b) (va_wpProof_Cswap2 bit_in p0_b p1_b)) | val va_quick_Cswap2 (bit_in: nat64) (p0_b p1_b: buffer64) : (va_quickCode unit (va_code_Cswap2 ()))
let va_quick_Cswap2 (bit_in: nat64) (p0_b p1_b: buffer64) : (va_quickCode unit (va_code_Cswap2 ())) = | false | null | false | (va_QProc (va_code_Cswap2 ())
([
va_Mod_mem_layout;
va_Mod_mem_heaplet 0;
va_Mod_flags;
va_Mod_reg64 rR10;
va_Mod_reg64 rR9;
va_Mod_reg64 rR8;
va_Mod_reg64 rRdi;
va_Mod_mem
])
(va_wp_Cswap2 bit_in p0_b p1_b)
(va_wpProof_Cswap2 bit_in p0_b p1_b)) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Vale.X64.Memory.nat64",
"Vale.X64.Memory.buffer64",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.Curve25519.X64.FastUtil.va_code_Cswap2",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_mem_layout",
"Vale.X64.QuickCode.va_Mod_mem_heaplet",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.QuickCode.va_Mod_mem",
"Prims.Nil",
"Vale.Curve25519.X64.FastUtil.va_wp_Cswap2",
"Vale.Curve25519.X64.FastUtil.va_wpProof_Cswap2",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))
val va_lemma_Fast_add1 : va_b0:va_code -> va_s0:va_state -> dst_b:buffer64 -> inA_b:buffer64 ->
inB:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))
[@ va_qattr]
let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state
-> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0)
(va_get_mem va_s0) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rdx:nat64) (va_x_r8:nat64)
(va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags
va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))) in
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Fast_add1 : dst_b:buffer64 -> inA_b:buffer64 -> inB:nat64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1 dst_b inA_b inB va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1 ()) ([va_Mod_flags;
va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) : (va_quickCode unit
(va_code_Fast_add1 ())) =
(va_QProc (va_code_Fast_add1 ()) ([va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 0;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx;
va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1 dst_b inA_b inB) (va_wpProof_Fast_add1 dst_b
inA_b inB))
//--
//-- Fast_add1_stdcall
val va_code_Fast_add1_stdcall : win:bool -> Tot va_code
val va_codegen_success_Fast_add1_stdcall : win:bool -> Tot va_pbool
let va_req_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) : prop =
(va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret))
let va_ens_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1_stdcall va_b0 va_s0 win dst_b inA_b inB_in /\ va_ensure_total va_b0 va_s0 va_sM
va_fM /\ va_get_ok va_sM /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in
let a0 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))
val va_lemma_Fast_add1_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> dst_b:buffer64 ->
inA_b:buffer64 -> inB_in:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let a0 = Vale.X64.Decls.buffer64_read
inA_b 0 (va_get_mem va_s0) in let a1 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0)
in let a2 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64)
(va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\
bmi2_enabled) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout
va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rbx:nat64)
(va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rsi:nat64) (va_x_rdi:nat64) (va_x_rbp:nat64)
(va_x_rsp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64)
(va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags
va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13
(va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64
rR8 va_x_r8 (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRdi va_x_rdi
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))) in va_get_ok va_sM /\ (let (dst_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64
rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let a0 =
Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) ==> va_k va_sM (())))
val va_wpProof_Fast_add1_stdcall : win:bool -> dst_b:buffer64 -> inA_b:buffer64 -> inB_in:nat64 ->
va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1_stdcall win dst_b inA_b inB_in va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1_stdcall win)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR11; va_Mod_reg64 rR10;
va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp; va_Mod_reg64 rRbp; va_Mod_reg64 rRdi;
va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax;
va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64) :
(va_quickCode unit (va_code_Fast_add1_stdcall win)) =
(va_QProc (va_code_Fast_add1_stdcall win) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp;
va_Mod_reg64 rRbp; va_Mod_reg64 rRdi; va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1_stdcall win dst_b inA_b
inB_in) (va_wpProof_Fast_add1_stdcall win dst_b inA_b inB_in))
//--
//-- Cswap2
val va_code_Cswap2 : va_dummy:unit -> Tot va_code
val va_codegen_success_Cswap2 : va_dummy:unit -> Tot va_pbool
let va_req_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) :
prop =
(va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in == va_get_reg64 rRdi va_s0 /\
va_get_reg64 rRdi va_s0 <= 1 /\ (Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0) p1_b 8 (va_get_mem_layout va_s0) Secret))
let va_ens_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Cswap2 va_b0 va_s0 bit_in p0_b p1_b /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (va_get_reg64 rRdi va_s0
= 1) then old_p1_0 else old_p0_0) /\ p0_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_1
else old_p0_1) /\ p0_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_3 else old_p0_3) /\ p0_4 == (if
(va_get_reg64 rRdi va_s0 = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (va_get_reg64 rRdi
va_s0 = 1) then old_p1_5 else old_p0_5) /\ p0_6 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p1_6 else old_p0_6) /\ p0_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_7 else
old_p0_7) /\ p1_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_0 else old_p1_0) /\ p1_1 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_2 else old_p1_2) /\ p1_3 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p0_3 else old_p1_3) /\ p1_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_4 else
old_p1_4) /\ p1_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_5 else old_p1_5) /\ p1_6 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_7 else old_p1_7))) /\ va_state_eq va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdi va_sM (va_update_ok
va_sM (va_update_mem va_sM va_s0))))))))))
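// va_lemma_Cswap2 restates the va_req_Cswap2/va_ens_Cswap2 contract as a Ghost function returning
// the final state and fuel.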
val va_lemma_Cswap2 : va_b0:va_code -> va_s0:va_state -> bit_in:nat64 -> p0_b:buffer64 ->
p1_b:buffer64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in == va_get_reg64 rRdi va_s0 /\
va_get_reg64 rRdi va_s0 <= 1 /\ (Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0) p1_b 8 (va_get_mem_layout va_s0) Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0)
in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem
va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2
(va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let
(old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0)
in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem
va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2
(va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let
(old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (let p0_0 =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (va_get_reg64 rRdi va_s0
= 1) then old_p1_0 else old_p0_0) /\ p0_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_1
else old_p0_1) /\ p0_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_3 else old_p0_3) /\ p0_4 == (if
(va_get_reg64 rRdi va_s0 = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (va_get_reg64 rRdi
va_s0 = 1) then old_p1_5 else old_p0_5) /\ p0_6 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p1_6 else old_p0_6) /\ p0_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_7 else
old_p0_7) /\ p1_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_0 else old_p1_0) /\ p1_1 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_2 else old_p1_2) /\ p1_3 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p0_3 else old_p1_3) /\ p1_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_4 else
old_p1_4) /\ p1_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_5 else old_p1_5) /\ p1_6 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_7 else old_p1_7))) /\ va_state_eq va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdi va_sM (va_update_ok
va_sM (va_update_mem va_sM va_s0)))))))))))
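// va_wp_Cswap2 expresses the same contract in weakest-precondition form, quantifying over the
// registers, flags and heap state that Cswap2 may modify.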
[@ va_qattr]
let va_wp_Cswap2 (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) (va_s0:va_state) (va_k:(va_state ->
unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
bit_in == va_get_reg64 rRdi va_s0 /\ va_get_reg64 rRdi va_s0 <= 1 /\
(Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdx va_s0) p1_b 8
(va_get_mem_layout va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rdi:nat64)
(va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags va_x_efl (va_upd_reg64 rR10
va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRdi va_x_rdi
(va_upd_mem va_x_mem va_s0))))))) in va_get_ok va_sM /\ (let (old_p0_0:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in let
(old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in
let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0)
in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem
va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 4
(va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in let
(old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in
let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0)
in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem
va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 4
(va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Decls.modifies_buffer_2 p0_b
p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_sM) in let p0_1 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let
p0_2 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == va_if (va_get_reg64 rRdi
va_s0 = 1) (fun _ -> old_p1_0) (fun _ -> old_p0_0) /\ p0_1 == va_if (va_get_reg64 rRdi va_s0 =
1) (fun _ -> old_p1_1) (fun _ -> old_p0_1) /\ p0_2 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun
_ -> old_p1_2) (fun _ -> old_p0_2) /\ p0_3 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_3) (fun _ -> old_p0_3) /\ p0_4 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_4) (fun _ -> old_p0_4) /\ p0_5 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_5) (fun _ -> old_p0_5) /\ p0_6 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_6) (fun _ -> old_p0_6) /\ p0_7 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_7) (fun _ -> old_p0_7) /\ p1_0 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_0) (fun _ -> old_p1_0) /\ p1_1 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_1) (fun _ -> old_p1_1) /\ p1_2 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_2) (fun _ -> old_p1_2) /\ p1_3 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_3) (fun _ -> old_p1_3) /\ p1_4 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_4) (fun _ -> old_p1_4) /\ p1_5 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_5) (fun _ -> old_p1_5) /\ p1_6 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_6) (fun _ -> old_p1_6) /\ p1_7 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_7) (fun _ -> old_p1_7))) ==> va_k va_sM (())))
val va_wpProof_Cswap2 : bit_in:nat64 -> p0_b:buffer64 -> p1_b:buffer64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Cswap2 bit_in p0_b p1_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Cswap2 ()) ([va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8;
va_Mod_reg64 rRdi; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Cswap2 (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) : (va_quickCode unit | false | false | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Cswap2 (bit_in: nat64) (p0_b p1_b: buffer64) : (va_quickCode unit (va_code_Cswap2 ())) | [] | Vale.Curve25519.X64.FastUtil.va_quick_Cswap2 | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | bit_in: Vale.X64.Memory.nat64 -> p0_b: Vale.X64.Memory.buffer64 -> p1_b: Vale.X64.Memory.buffer64
-> Vale.X64.QuickCode.va_quickCode Prims.unit (Vale.Curve25519.X64.FastUtil.va_code_Cswap2 ()) | {
"end_col": 73,
"end_line": 562,
"start_col": 2,
"start_line": 560
} |
Prims.Tot | val va_req_Cswap2_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(bit_in: nat64)
(p0_b p1_b: buffer64)
: prop | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_req_Cswap2_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (bit_in:nat64)
(p0_b:buffer64) (p1_b:buffer64) : prop =
(va_require_total va_b0 (va_code_Cswap2_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(p0_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else
va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (old_p0_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in let
(old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0) in
let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0)
in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem
va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5
(va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 7 (va_get_mem va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in let
(old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0) in
let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0)
in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem
va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5
(va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 7 (va_get_mem va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
bit_in <= 1 /\ bit_in = (if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) /\
(Vale.X64.Decls.buffers_disjoint p0_b p1_b \/ p1_b == p0_b) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) p0_in p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) p1_in p1_b 8 (va_get_mem_layout va_s0)
Secret)) | val va_req_Cswap2_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(bit_in: nat64)
(p0_b p1_b: buffer64)
: prop
let va_req_Cswap2_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(bit_in: nat64)
(p0_b p1_b: buffer64)
: prop = | false | null | false | (va_require_total va_b0 (va_code_Cswap2_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let p0_in:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0)
in
let p1_in:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0)
in
let old_p0_0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let old_p0_1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in
let old_p0_2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0) in
let old_p0_3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in
let old_p0_4:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in
let old_p0_5:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let old_p0_6:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in
let old_p0_7:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let old_p1_0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let old_p1_1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in
let old_p1_2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0) in
let old_p1_3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in
let old_p1_4:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in
let old_p1_5:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let old_p1_6:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in
let old_p1_7:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in <= 1 /\
bit_in = (if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) /\
(Vale.X64.Decls.buffers_disjoint p0_b p1_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) p0_in p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) p1_in p1_b 8 (va_get_mem_layout va_s0) Secret)
) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Prims.bool",
"Vale.X64.Memory.nat64",
"Vale.X64.Memory.buffer64",
"Prims.l_and",
"Vale.X64.Decls.va_require_total",
"Vale.Curve25519.X64.FastUtil.va_code_Cswap2_stdcall",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Prims.op_LessThanOrEqual",
"Prims.op_Equality",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRdi",
"Prims.l_or",
"Vale.X64.Decls.buffers_disjoint",
"Vale.X64.Decls.validDstAddrs64",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.buffer64_read",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRsi",
"Prims.prop"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
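// Fast_add1 adds the 64-bit scalar passed in rdx (inB) to the 4-limb value stored in inA_b, writing
// the low four limbs of the sum to dst_b and leaving the carry limb in rax, so that
// pow2_five d0 d1 d2 d3 rax == a + inB; it requires the adx and bmi2 extensions.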
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))
val va_lemma_Fast_add1 : va_b0:va_code -> va_s0:va_state -> dst_b:buffer64 -> inA_b:buffer64 ->
inB:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))
[@ va_qattr]
let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state
-> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0)
(va_get_mem va_s0) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rdx:nat64) (va_x_r8:nat64)
(va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags
va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))) in
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Fast_add1 : dst_b:buffer64 -> inA_b:buffer64 -> inB:nat64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1 dst_b inA_b inB va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1 ()) ([va_Mod_flags;
va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) : (va_quickCode unit
(va_code_Fast_add1 ())) =
(va_QProc (va_code_Fast_add1 ()) ([va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 0;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx;
va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1 dst_b inA_b inB) (va_wpProof_Fast_add1 dst_b
inA_b inB))
//--
//-- Fast_add1_stdcall
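// Fast_add1_stdcall wraps Fast_add1 for both calling conventions: arguments come from rcx/rdx/r8 on
// Windows and from rdi/rsi/rdx otherwise, and the postcondition states that the callee-saved
// registers of the chosen convention are preserved.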
val va_code_Fast_add1_stdcall : win:bool -> Tot va_code
val va_codegen_success_Fast_add1_stdcall : win:bool -> Tot va_pbool
let va_req_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) : prop =
(va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret))
let va_ens_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1_stdcall va_b0 va_s0 win dst_b inA_b inB_in /\ va_ensure_total va_b0 va_s0 va_sM
va_fM /\ va_get_ok va_sM /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in
let a0 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))
val va_lemma_Fast_add1_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> dst_b:buffer64 ->
inA_b:buffer64 -> inB_in:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let a0 = Vale.X64.Decls.buffer64_read
inA_b 0 (va_get_mem va_s0) in let a1 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0)
in let a2 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64)
(va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\
bmi2_enabled) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout
va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rbx:nat64)
(va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rsi:nat64) (va_x_rdi:nat64) (va_x_rbp:nat64)
(va_x_rsp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64)
(va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags
va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13
(va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64
rR8 va_x_r8 (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRdi va_x_rdi
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))) in va_get_ok va_sM /\ (let (dst_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64
rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let a0 =
Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) ==> va_k va_sM (())))
val va_wpProof_Fast_add1_stdcall : win:bool -> dst_b:buffer64 -> inA_b:buffer64 -> inB_in:nat64 ->
va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1_stdcall win dst_b inA_b inB_in va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1_stdcall win)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR11; va_Mod_reg64 rR10;
va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp; va_Mod_reg64 rRbp; va_Mod_reg64 rRdi;
va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax;
va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64) :
(va_quickCode unit (va_code_Fast_add1_stdcall win)) =
(va_QProc (va_code_Fast_add1_stdcall win) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp;
va_Mod_reg64 rRbp; va_Mod_reg64 rRdi; va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1_stdcall win dst_b inA_b
inB_in) (va_wpProof_Fast_add1_stdcall win dst_b inA_b inB_in))
//--
//-- Cswap2
val va_code_Cswap2 : va_dummy:unit -> Tot va_code
val va_codegen_success_Cswap2 : va_dummy:unit -> Tot va_pbool
let va_req_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) :
prop =
(va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in == va_get_reg64 rRdi va_s0 /\
va_get_reg64 rRdi va_s0 <= 1 /\ (Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0) p1_b 8 (va_get_mem_layout va_s0) Secret))
let va_ens_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Cswap2 va_b0 va_s0 bit_in p0_b p1_b /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (va_get_reg64 rRdi va_s0
= 1) then old_p1_0 else old_p0_0) /\ p0_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_1
else old_p0_1) /\ p0_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_3 else old_p0_3) /\ p0_4 == (if
(va_get_reg64 rRdi va_s0 = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (va_get_reg64 rRdi
va_s0 = 1) then old_p1_5 else old_p0_5) /\ p0_6 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p1_6 else old_p0_6) /\ p0_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_7 else
old_p0_7) /\ p1_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_0 else old_p1_0) /\ p1_1 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_2 else old_p1_2) /\ p1_3 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p0_3 else old_p1_3) /\ p1_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_4 else
old_p1_4) /\ p1_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_5 else old_p1_5) /\ p1_6 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_7 else old_p1_7))) /\ va_state_eq va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdi va_sM (va_update_ok
va_sM (va_update_mem va_sM va_s0))))))))))
val va_lemma_Cswap2 : va_b0:va_code -> va_s0:va_state -> bit_in:nat64 -> p0_b:buffer64 ->
p1_b:buffer64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in == va_get_reg64 rRdi va_s0 /\
va_get_reg64 rRdi va_s0 <= 1 /\ (Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0) p1_b 8 (va_get_mem_layout va_s0) Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0)
in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem
va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2
(va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let
(old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0)
in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem
va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2
(va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let
(old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (let p0_0 =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (va_get_reg64 rRdi va_s0
= 1) then old_p1_0 else old_p0_0) /\ p0_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_1
else old_p0_1) /\ p0_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_3 else old_p0_3) /\ p0_4 == (if
(va_get_reg64 rRdi va_s0 = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (va_get_reg64 rRdi
va_s0 = 1) then old_p1_5 else old_p0_5) /\ p0_6 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p1_6 else old_p0_6) /\ p0_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_7 else
old_p0_7) /\ p1_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_0 else old_p1_0) /\ p1_1 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_2 else old_p1_2) /\ p1_3 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p0_3 else old_p1_3) /\ p1_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_4 else
old_p1_4) /\ p1_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_5 else old_p1_5) /\ p1_6 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_7 else old_p1_7))) /\ va_state_eq va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdi va_sM (va_update_ok
va_sM (va_update_mem va_sM va_s0)))))))))))
[@ va_qattr]
let va_wp_Cswap2 (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) (va_s0:va_state) (va_k:(va_state ->
unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
bit_in == va_get_reg64 rRdi va_s0 /\ va_get_reg64 rRdi va_s0 <= 1 /\
(Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdx va_s0) p1_b 8
(va_get_mem_layout va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rdi:nat64)
(va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags va_x_efl (va_upd_reg64 rR10
va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRdi va_x_rdi
(va_upd_mem va_x_mem va_s0))))))) in va_get_ok va_sM /\ (let (old_p0_0:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in let
(old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in
let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0)
in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem
va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 4
(va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in let
(old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in
let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0)
in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem
va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 4
(va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Decls.modifies_buffer_2 p0_b
p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_sM) in let p0_1 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let
p0_2 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == va_if (va_get_reg64 rRdi
va_s0 = 1) (fun _ -> old_p1_0) (fun _ -> old_p0_0) /\ p0_1 == va_if (va_get_reg64 rRdi va_s0 =
1) (fun _ -> old_p1_1) (fun _ -> old_p0_1) /\ p0_2 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun
_ -> old_p1_2) (fun _ -> old_p0_2) /\ p0_3 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_3) (fun _ -> old_p0_3) /\ p0_4 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_4) (fun _ -> old_p0_4) /\ p0_5 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_5) (fun _ -> old_p0_5) /\ p0_6 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_6) (fun _ -> old_p0_6) /\ p0_7 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_7) (fun _ -> old_p0_7) /\ p1_0 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_0) (fun _ -> old_p1_0) /\ p1_1 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_1) (fun _ -> old_p1_1) /\ p1_2 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_2) (fun _ -> old_p1_2) /\ p1_3 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_3) (fun _ -> old_p1_3) /\ p1_4 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_4) (fun _ -> old_p1_4) /\ p1_5 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_5) (fun _ -> old_p1_5) /\ p1_6 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_6) (fun _ -> old_p1_6) /\ p1_7 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_7) (fun _ -> old_p1_7))) ==> va_k va_sM (())))
val va_wpProof_Cswap2 : bit_in:nat64 -> p0_b:buffer64 -> p1_b:buffer64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Cswap2 bit_in p0_b p1_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Cswap2 ()) ([va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8;
va_Mod_reg64 rRdi; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Cswap2 (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) : (va_quickCode unit
(va_code_Cswap2 ())) =
(va_QProc (va_code_Cswap2 ()) ([va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdi; va_Mod_mem])
(va_wp_Cswap2 bit_in p0_b p1_b) (va_wpProof_Cswap2 bit_in p0_b p1_b))
//--
//-- Cswap2_stdcall
val va_code_Cswap2_stdcall : win:bool -> Tot va_code
val va_codegen_success_Cswap2_stdcall : win:bool -> Tot va_pbool
let va_req_Cswap2_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (bit_in:nat64) | false | true | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_req_Cswap2_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(bit_in: nat64)
(p0_b p1_b: buffer64)
: prop | [] | Vale.Curve25519.X64.FastUtil.va_req_Cswap2_stdcall | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
win: Prims.bool ->
bit_in: Vale.X64.Memory.nat64 ->
p0_b: Vale.X64.Memory.buffer64 ->
p1_b: Vale.X64.Memory.buffer64
-> Prims.prop | {
"end_col": 12,
"end_line": 598,
"start_col": 2,
"start_line": 571
} |
Prims.Tot | val va_quick_Cswap2_stdcall (win: bool) (bit_in: nat64) (p0_b p1_b: buffer64)
: (va_quickCode unit (va_code_Cswap2_stdcall win)) | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Cswap2_stdcall (win:bool) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) :
(va_quickCode unit (va_code_Cswap2_stdcall win)) =
(va_QProc (va_code_Cswap2_stdcall win) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8;
va_Mod_reg64 rRsp; va_Mod_reg64 rRdi; va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_mem])
(va_wp_Cswap2_stdcall win bit_in p0_b p1_b) (va_wpProof_Cswap2_stdcall win bit_in p0_b p1_b)) | val va_quick_Cswap2_stdcall (win: bool) (bit_in: nat64) (p0_b p1_b: buffer64)
: (va_quickCode unit (va_code_Cswap2_stdcall win))
let va_quick_Cswap2_stdcall (win: bool) (bit_in: nat64) (p0_b p1_b: buffer64)
: (va_quickCode unit (va_code_Cswap2_stdcall win)) = | false | null | false | (va_QProc (va_code_Cswap2_stdcall win)
([
va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp; va_Mod_reg64 rRdi;
va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_mem
])
(va_wp_Cswap2_stdcall win bit_in p0_b p1_b)
(va_wpProof_Cswap2_stdcall win bit_in p0_b p1_b)) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.X64.Memory.nat64",
"Vale.X64.Memory.buffer64",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.Curve25519.X64.FastUtil.va_code_Cswap2_stdcall",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_stackTaint",
"Vale.X64.QuickCode.va_Mod_stack",
"Vale.X64.QuickCode.va_Mod_mem_layout",
"Vale.X64.QuickCode.va_Mod_mem_heaplet",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.QuickCode.va_Mod_mem",
"Prims.Nil",
"Vale.Curve25519.X64.FastUtil.va_wp_Cswap2_stdcall",
"Vale.Curve25519.X64.FastUtil.va_wpProof_Cswap2_stdcall",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))
val va_lemma_Fast_add1 : va_b0:va_code -> va_s0:va_state -> dst_b:buffer64 -> inA_b:buffer64 ->
inB:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))
[@ va_qattr]
let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state
-> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0)
(va_get_mem va_s0) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rdx:nat64) (va_x_r8:nat64)
(va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags
va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))) in
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Fast_add1 : dst_b:buffer64 -> inA_b:buffer64 -> inB:nat64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1 dst_b inA_b inB va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1 ()) ([va_Mod_flags;
va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) : (va_quickCode unit
(va_code_Fast_add1 ())) =
(va_QProc (va_code_Fast_add1 ()) ([va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 0;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx;
va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1 dst_b inA_b inB) (va_wpProof_Fast_add1 dst_b
inA_b inB))
//--
//-- Fast_add1_stdcall
val va_code_Fast_add1_stdcall : win:bool -> Tot va_code
val va_codegen_success_Fast_add1_stdcall : win:bool -> Tot va_pbool
let va_req_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) : prop =
(va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret))
let va_ens_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1_stdcall va_b0 va_s0 win dst_b inA_b inB_in /\ va_ensure_total va_b0 va_s0 va_sM
va_fM /\ va_get_ok va_sM /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in
let a0 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))
val va_lemma_Fast_add1_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> dst_b:buffer64 ->
inA_b:buffer64 -> inB_in:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let a0 = Vale.X64.Decls.buffer64_read
inA_b 0 (va_get_mem va_s0) in let a1 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0)
in let a2 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64)
(va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\
bmi2_enabled) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout
va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rbx:nat64)
(va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rsi:nat64) (va_x_rdi:nat64) (va_x_rbp:nat64)
(va_x_rsp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64)
(va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags
va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13
(va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64
rR8 va_x_r8 (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRdi va_x_rdi
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))) in va_get_ok va_sM /\ (let (dst_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64
rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let a0 =
Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) ==> va_k va_sM (())))
val va_wpProof_Fast_add1_stdcall : win:bool -> dst_b:buffer64 -> inA_b:buffer64 -> inB_in:nat64 ->
va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1_stdcall win dst_b inA_b inB_in va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1_stdcall win)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR11; va_Mod_reg64 rR10;
va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp; va_Mod_reg64 rRbp; va_Mod_reg64 rRdi;
va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax;
va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64) :
(va_quickCode unit (va_code_Fast_add1_stdcall win)) =
(va_QProc (va_code_Fast_add1_stdcall win) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp;
va_Mod_reg64 rRbp; va_Mod_reg64 rRdi; va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1_stdcall win dst_b inA_b
inB_in) (va_wpProof_Fast_add1_stdcall win dst_b inA_b inB_in))
//--
//-- Cswap2
val va_code_Cswap2 : va_dummy:unit -> Tot va_code
val va_codegen_success_Cswap2 : va_dummy:unit -> Tot va_pbool
let va_req_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) :
prop =
(va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in == va_get_reg64 rRdi va_s0 /\
va_get_reg64 rRdi va_s0 <= 1 /\ (Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0) p1_b 8 (va_get_mem_layout va_s0) Secret))
let va_ens_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Cswap2 va_b0 va_s0 bit_in p0_b p1_b /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (va_get_reg64 rRdi va_s0
= 1) then old_p1_0 else old_p0_0) /\ p0_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_1
else old_p0_1) /\ p0_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_3 else old_p0_3) /\ p0_4 == (if
(va_get_reg64 rRdi va_s0 = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (va_get_reg64 rRdi
va_s0 = 1) then old_p1_5 else old_p0_5) /\ p0_6 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p1_6 else old_p0_6) /\ p0_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_7 else
old_p0_7) /\ p1_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_0 else old_p1_0) /\ p1_1 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_2 else old_p1_2) /\ p1_3 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p0_3 else old_p1_3) /\ p1_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_4 else
old_p1_4) /\ p1_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_5 else old_p1_5) /\ p1_6 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_7 else old_p1_7))) /\ va_state_eq va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdi va_sM (va_update_ok
va_sM (va_update_mem va_sM va_s0))))))))))
val va_lemma_Cswap2 : va_b0:va_code -> va_s0:va_state -> bit_in:nat64 -> p0_b:buffer64 ->
p1_b:buffer64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in == va_get_reg64 rRdi va_s0 /\
va_get_reg64 rRdi va_s0 <= 1 /\ (Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0) p1_b 8 (va_get_mem_layout va_s0) Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0)
in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem
va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2
(va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let
(old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0)
in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem
va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2
(va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let
(old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (let p0_0 =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (va_get_reg64 rRdi va_s0
= 1) then old_p1_0 else old_p0_0) /\ p0_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_1
else old_p0_1) /\ p0_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_3 else old_p0_3) /\ p0_4 == (if
(va_get_reg64 rRdi va_s0 = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (va_get_reg64 rRdi
va_s0 = 1) then old_p1_5 else old_p0_5) /\ p0_6 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p1_6 else old_p0_6) /\ p0_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_7 else
old_p0_7) /\ p1_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_0 else old_p1_0) /\ p1_1 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_2 else old_p1_2) /\ p1_3 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p0_3 else old_p1_3) /\ p1_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_4 else
old_p1_4) /\ p1_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_5 else old_p1_5) /\ p1_6 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_7 else old_p1_7))) /\ va_state_eq va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdi va_sM (va_update_ok
va_sM (va_update_mem va_sM va_s0)))))))))))
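// Note: va_wp_Cswap2 below restates the Cswap2 contract above in weakest-precondition
// form. The quantified va_x_* variables range over the registers, flags, and heap
// components that Cswap2 may modify, and va_k is the continuation's postcondition,
// asserted at the resulting state va_sM.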
[@ va_qattr]
let va_wp_Cswap2 (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) (va_s0:va_state) (va_k:(va_state ->
unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
bit_in == va_get_reg64 rRdi va_s0 /\ va_get_reg64 rRdi va_s0 <= 1 /\
(Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdx va_s0) p1_b 8
(va_get_mem_layout va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rdi:nat64)
(va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags va_x_efl (va_upd_reg64 rR10
va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRdi va_x_rdi
(va_upd_mem va_x_mem va_s0))))))) in va_get_ok va_sM /\ (let (old_p0_0:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in let
(old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in
let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0)
in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem
va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 4
(va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in let
(old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in
let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0)
in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem
va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 4
(va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Decls.modifies_buffer_2 p0_b
p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_sM) in let p0_1 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let
p0_2 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == va_if (va_get_reg64 rRdi
va_s0 = 1) (fun _ -> old_p1_0) (fun _ -> old_p0_0) /\ p0_1 == va_if (va_get_reg64 rRdi va_s0 =
1) (fun _ -> old_p1_1) (fun _ -> old_p0_1) /\ p0_2 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun
_ -> old_p1_2) (fun _ -> old_p0_2) /\ p0_3 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_3) (fun _ -> old_p0_3) /\ p0_4 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_4) (fun _ -> old_p0_4) /\ p0_5 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_5) (fun _ -> old_p0_5) /\ p0_6 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_6) (fun _ -> old_p0_6) /\ p0_7 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_7) (fun _ -> old_p0_7) /\ p1_0 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_0) (fun _ -> old_p1_0) /\ p1_1 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_1) (fun _ -> old_p1_1) /\ p1_2 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_2) (fun _ -> old_p1_2) /\ p1_3 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_3) (fun _ -> old_p1_3) /\ p1_4 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_4) (fun _ -> old_p1_4) /\ p1_5 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_5) (fun _ -> old_p1_5) /\ p1_6 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_6) (fun _ -> old_p1_6) /\ p1_7 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_7) (fun _ -> old_p1_7))) ==> va_k va_sM (())))
val va_wpProof_Cswap2 : bit_in:nat64 -> p0_b:buffer64 -> p1_b:buffer64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Cswap2 bit_in p0_b p1_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Cswap2 ()) ([va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8;
va_Mod_reg64 rRdi; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
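// va_quick_Cswap2 packages the code (va_code_Cswap2), its modifies footprint, the
// weakest precondition (va_wp_Cswap2), and the corresponding soundness proof
// (va_wpProof_Cswap2) into a single va_QProc value for the quick-code framework.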
[@ "opaque_to_smt" va_qattr]
let va_quick_Cswap2 (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) : (va_quickCode unit
(va_code_Cswap2 ())) =
(va_QProc (va_code_Cswap2 ()) ([va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdi; va_Mod_mem])
(va_wp_Cswap2 bit_in p0_b p1_b) (va_wpProof_Cswap2 bit_in p0_b p1_b))
//--
//-- Cswap2_stdcall
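// Cswap2_stdcall is the calling-convention wrapper around Cswap2: with win the
// selection bit arrives in rcx and the two 8-word buffers p0_b/p1_b in rdx/r8,
// otherwise (System V) in rdi and rsi/rdx. The buffer contents are exchanged
// word-for-word exactly when the bit equals 1, and left unchanged otherwise.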
val va_code_Cswap2_stdcall : win:bool -> Tot va_code
val va_codegen_success_Cswap2_stdcall : win:bool -> Tot va_pbool
let va_req_Cswap2_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (bit_in:nat64)
(p0_b:buffer64) (p1_b:buffer64) : prop =
(va_require_total va_b0 (va_code_Cswap2_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(p0_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else
va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (old_p0_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in let
(old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0) in
let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0)
in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem
va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5
(va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 7 (va_get_mem va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in let
(old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0) in
let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0)
in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem
va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5
(va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 7 (va_get_mem va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
bit_in <= 1 /\ bit_in = (if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) /\
(Vale.X64.Decls.buffers_disjoint p0_b p1_b \/ p1_b == p0_b) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) p0_in p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) p1_in p1_b 8 (va_get_mem_layout va_s0)
Secret))
let va_ens_Cswap2_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (bit_in:nat64)
(p0_b:buffer64) (p1_b:buffer64) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Cswap2_stdcall va_b0 va_s0 win bit_in p0_b p1_b /\ va_ensure_total va_b0 va_s0 va_sM
va_fM /\ va_get_ok va_sM /\ (let (p0_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0)
in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem
va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2
(va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let
(old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0)
in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem
va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2
(va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let
(old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (bit_in = 1) then
old_p1_0 else old_p0_0) /\ p0_1 == (if (bit_in = 1) then old_p1_1 else old_p0_1) /\ p0_2 == (if
(bit_in = 1) then old_p1_2 else old_p0_2) /\ p0_3 == (if (bit_in = 1) then old_p1_3 else
old_p0_3) /\ p0_4 == (if (bit_in = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (bit_in = 1)
then old_p1_5 else old_p0_5) /\ p0_6 == (if (bit_in = 1) then old_p1_6 else old_p0_6) /\ p0_7
== (if (bit_in = 1) then old_p1_7 else old_p0_7) /\ p1_0 == (if (bit_in = 1) then old_p0_0 else
old_p1_0) /\ p1_1 == (if (bit_in = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (bit_in = 1)
then old_p0_2 else old_p1_2) /\ p1_3 == (if (bit_in = 1) then old_p0_3 else old_p1_3) /\ p1_4
== (if (bit_in = 1) then old_p0_4 else old_p1_4) /\ p1_5 == (if (bit_in = 1) then old_p0_5 else
old_p1_5) /\ p1_6 == (if (bit_in = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (bit_in = 1)
then old_p0_7 else old_p1_7) /\ Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0)
(va_get_mem va_sM) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==>
va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\ va_get_reg64 rRsp va_sM == va_get_reg64
rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM
(va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRsp va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))
val va_lemma_Cswap2_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> bit_in:nat64 ->
p0_b:buffer64 -> p1_b:buffer64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Cswap2_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(p0_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else
va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (old_p0_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in let
(old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0) in
let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0)
in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem
va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5
(va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 7 (va_get_mem va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in let
(old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0) in
let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0)
in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem
va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5
(va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 7 (va_get_mem va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
bit_in <= 1 /\ bit_in = (if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) /\
(Vale.X64.Decls.buffers_disjoint p0_b p1_b \/ p1_b == p0_b) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) p0_in p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) p1_in p1_b 8 (va_get_mem_layout va_s0)
Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (p0_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else
va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (old_p0_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in let
(old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0) in
let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0)
in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem
va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5
(va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 7 (va_get_mem va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in let
(old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0) in
let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0)
in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem
va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5
(va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 7 (va_get_mem va_s0) in let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM)
in let p0_1 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (bit_in = 1) then
old_p1_0 else old_p0_0) /\ p0_1 == (if (bit_in = 1) then old_p1_1 else old_p0_1) /\ p0_2 == (if
(bit_in = 1) then old_p1_2 else old_p0_2) /\ p0_3 == (if (bit_in = 1) then old_p1_3 else
old_p0_3) /\ p0_4 == (if (bit_in = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (bit_in = 1)
then old_p1_5 else old_p0_5) /\ p0_6 == (if (bit_in = 1) then old_p1_6 else old_p0_6) /\ p0_7
== (if (bit_in = 1) then old_p1_7 else old_p0_7) /\ p1_0 == (if (bit_in = 1) then old_p0_0 else
old_p1_0) /\ p1_1 == (if (bit_in = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (bit_in = 1)
then old_p0_2 else old_p1_2) /\ p1_3 == (if (bit_in = 1) then old_p0_3 else old_p1_3) /\ p1_4
== (if (bit_in = 1) then old_p0_4 else old_p1_4) /\ p1_5 == (if (bit_in = 1) then old_p0_5 else
old_p1_5) /\ p1_6 == (if (bit_in = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (bit_in = 1)
then old_p0_7 else old_p1_7) /\ Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0)
(va_get_mem va_sM) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==>
va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\ va_get_reg64 rRsp va_sM == va_get_reg64
rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM
(va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRsp va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))
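// va_wp_Cswap2_stdcall below is the weakest-precondition form of the lemma above;
// its va_if thunks select between the old p0/p1 words on the same (bit_in = 1)
// condition that the ensures clause writes as plain if-expressions.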
[@ va_qattr]
let va_wp_Cswap2_stdcall (win:bool) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (p0_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64
rRdx va_s0) in let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in <= 1 /\
bit_in = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) /\
(Vale.X64.Decls.buffers_disjoint p0_b p1_b \/ p1_b == p0_b) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) p0_in p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) p1_in p1_b 8 (va_get_mem_layout va_s0)
Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rdx:nat64) (va_x_rsi:nat64) (va_x_rdi:nat64)
(va_x_rsp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags
va_x_efl (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRsi va_x_rsi
(va_upd_reg64 rRdx va_x_rdx (va_upd_mem va_x_mem va_s0)))))))))))) in va_get_ok va_sM /\ (let
(p0_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0)
(fun _ -> va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0 18446744073709551615)) = va_if
win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in let p0_0 =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == va_if (bit_in = 1) (fun _ ->
old_p1_0) (fun _ -> old_p0_0) /\ p0_1 == va_if (bit_in = 1) (fun _ -> old_p1_1) (fun _ ->
old_p0_1) /\ p0_2 == va_if (bit_in = 1) (fun _ -> old_p1_2) (fun _ -> old_p0_2) /\ p0_3 ==
va_if (bit_in = 1) (fun _ -> old_p1_3) (fun _ -> old_p0_3) /\ p0_4 == va_if (bit_in = 1) (fun _
-> old_p1_4) (fun _ -> old_p0_4) /\ p0_5 == va_if (bit_in = 1) (fun _ -> old_p1_5) (fun _ ->
old_p0_5) /\ p0_6 == va_if (bit_in = 1) (fun _ -> old_p1_6) (fun _ -> old_p0_6) /\ p0_7 ==
va_if (bit_in = 1) (fun _ -> old_p1_7) (fun _ -> old_p0_7) /\ p1_0 == va_if (bit_in = 1) (fun _
-> old_p0_0) (fun _ -> old_p1_0) /\ p1_1 == va_if (bit_in = 1) (fun _ -> old_p0_1) (fun _ ->
old_p1_1) /\ p1_2 == va_if (bit_in = 1) (fun _ -> old_p0_2) (fun _ -> old_p1_2) /\ p1_3 ==
va_if (bit_in = 1) (fun _ -> old_p0_3) (fun _ -> old_p1_3) /\ p1_4 == va_if (bit_in = 1) (fun _
-> old_p0_4) (fun _ -> old_p1_4) /\ p1_5 == va_if (bit_in = 1) (fun _ -> old_p0_5) (fun _ ->
old_p1_5) /\ p1_6 == va_if (bit_in = 1) (fun _ -> old_p0_6) (fun _ -> old_p1_6) /\ p1_7 ==
va_if (bit_in = 1) (fun _ -> old_p0_7) (fun _ -> old_p1_7) /\ Vale.X64.Decls.modifies_buffer_2
p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRdi va_sM ==
va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) ==> va_k va_sM (())))
val va_wpProof_Cswap2_stdcall : win:bool -> bit_in:nat64 -> p0_b:buffer64 -> p1_b:buffer64 ->
va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Cswap2_stdcall win bit_in p0_b p1_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Cswap2_stdcall win)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp; va_Mod_reg64 rRdi;
va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Cswap2_stdcall (win:bool) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) : | false | false | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Cswap2_stdcall (win: bool) (bit_in: nat64) (p0_b p1_b: buffer64)
: (va_quickCode unit (va_code_Cswap2_stdcall win)) | [] | Vale.Curve25519.X64.FastUtil.va_quick_Cswap2_stdcall | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
bit_in: Vale.X64.Memory.nat64 ->
p0_b: Vale.X64.Memory.buffer64 ->
p1_b: Vale.X64.Memory.buffer64
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.Curve25519.X64.FastUtil.va_code_Cswap2_stdcall win) | {
"end_col": 97,
"end_line": 847,
"start_col": 2,
"start_line": 844
} |
Prims.Tot | val va_wp_Fast_add1_stdcall
(win: bool)
(dst_b inA_b: buffer64)
(inB_in: nat64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64)
(va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\
bmi2_enabled) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout
va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rbx:nat64)
(va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rsi:nat64) (va_x_rdi:nat64) (va_x_rbp:nat64)
(va_x_rsp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64)
(va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags
va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13
(va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64
rR8 va_x_r8 (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRdi va_x_rdi
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))) in va_get_ok va_sM /\ (let (dst_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64
rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let a0 =
Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) ==> va_k va_sM (()))) | val va_wp_Fast_add1_stdcall
(win: bool)
(dst_b inA_b: buffer64)
(inB_in: nat64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Fast_add1_stdcall
(win: bool)
(dst_b inA_b: buffer64)
(inB_in: nat64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(let dst_in:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0)
in
let inA_in:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0)
in
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(adx_enabled /\ bmi2_enabled) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
inB_in = va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
dst_in
dst_b
4
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
inA_in
inA_b
4
(va_get_mem_layout va_s0)
Secret) /\
(forall (va_x_mem: vale_heap) (va_x_rax: nat64) (va_x_rbx: nat64) (va_x_rcx: nat64)
(va_x_rdx: nat64) (va_x_rsi: nat64) (va_x_rdi: nat64) (va_x_rbp: nat64) (va_x_rsp: nat64)
(va_x_r8: nat64) (va_x_r9: nat64) (va_x_r10: nat64) (va_x_r11: nat64) (va_x_r13: nat64)
(va_x_r14: nat64) (va_x_r15: nat64) (va_x_efl: Vale.X64.Flags.t) (va_x_heap0: vale_heap)
(va_x_memLayout: vale_heap_layout) (va_x_stack: vale_stack) (va_x_stackTaint: memtaint).
let va_sM =
va_upd_stackTaint va_x_stackTaint
(va_upd_stack va_x_stack
(va_upd_mem_layout va_x_memLayout
(va_upd_mem_heaplet 0
va_x_heap0
(va_upd_flags va_x_efl
(va_upd_reg64 rR15
va_x_r15
(va_upd_reg64 rR14
va_x_r14
(va_upd_reg64 rR13
va_x_r13
(va_upd_reg64 rR11
va_x_r11
(va_upd_reg64 rR10
va_x_r10
(va_upd_reg64 rR9
va_x_r9
(va_upd_reg64 rR8
va_x_r8
(va_upd_reg64 rRsp
va_x_rsp
(va_upd_reg64 rRbp
va_x_rbp
(va_upd_reg64 rRdi
va_x_rdi
(va_upd_reg64 rRsi
va_x_rsi
(va_upd_reg64 rRdx
va_x_rdx
(va_upd_reg64 rRcx
va_x_rcx
(va_upd_reg64 rRbx
va_x_rbx
(va_upd_reg64 rRax
va_x_rax
(va_upd_mem va_x_mem
va_s0)))))
)))))))))))))))
in
va_get_ok va_sM /\
(let dst_in:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0)
in
let inA_in:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0)
in
let a0 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let a1 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let a2 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let a3 = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in
let d1 = Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in
let d2 = Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in
let d3 = Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in
let a = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d = Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in
d == a + inB_in /\
Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\
(win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\
(~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.X64.Memory.buffer64",
"Vale.X64.Memory.nat64",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Vale.X64.CPU_Features_s.adx_enabled",
"Vale.X64.CPU_Features_s.bmi2_enabled",
"Prims.l_or",
"Vale.X64.Decls.buffers_disjoint",
"Prims.op_Equality",
"Vale.X64.Decls.va_if",
"Vale.Def.Types_s.nat64",
"Vale.X64.Machine_s.rR8",
"Prims.l_not",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Decls.validDstAddrs64",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validSrcAddrs64",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRdi",
"Prims.l_Forall",
"Vale.X64.InsBasic.vale_heap",
"Vale.X64.Flags.t",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Vale.X64.InsBasic.vale_stack",
"Vale.X64.Memory.memtaint",
"Prims.l_imp",
"Prims.int",
"Prims.op_Addition",
"Vale.X64.Decls.modifies_buffer",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR15",
"Prims.nat",
"Vale.Curve25519.Fast_defs.pow2_five",
"Vale.X64.Machine_s.rRax",
"Vale.Curve25519.Fast_defs.pow2_four",
"Vale.X64.Decls.buffer64_read",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_stackTaint",
"Vale.X64.Decls.va_upd_stack",
"Vale.X64.Decls.va_upd_mem_layout",
"Vale.X64.Decls.va_upd_mem_heaplet",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Decls.va_upd_mem"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
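// Fast_add1 adds the 64-bit scalar inB (passed in rdx) to the 4-limb value in inA_b
// and writes the 4 low limbs to dst_b. The postcondition
// pow2_five d0 d1 d2 d3 rax == pow2_four a0 a1 a2 a3 + inB says that, with the limbs
// interpreted in radix 2^64, the final rax carries the overflow of the 256-bit sum.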
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))
val va_lemma_Fast_add1 : va_b0:va_code -> va_s0:va_state -> dst_b:buffer64 -> inA_b:buffer64 ->
inB:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))
[@ va_qattr]
let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state
-> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0)
(va_get_mem va_s0) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rdx:nat64) (va_x_r8:nat64)
(va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags
va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))) in
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Fast_add1 : dst_b:buffer64 -> inA_b:buffer64 -> inB:nat64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1 dst_b inA_b inB va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1 ()) ([va_Mod_flags;
va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) : (va_quickCode unit
(va_code_Fast_add1 ())) =
(va_QProc (va_code_Fast_add1 ()) ([va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 0;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx;
va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1 dst_b inA_b inB) (va_wpProof_Fast_add1 dst_b
inA_b inB))
//--
//-- Fast_add1_stdcall
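// Fast_add1_stdcall is the platform wrapper for Fast_add1: with win, dst/inA/inB
// arrive in rcx/rdx/r8, otherwise in rdi/rsi/rdx. Its ensures clause additionally
// records that rsp and the callee-saved registers (rbx, rbp, r13-r15, plus rdi and
// rsi on Windows) are restored on return.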
val va_code_Fast_add1_stdcall : win:bool -> Tot va_code
val va_codegen_success_Fast_add1_stdcall : win:bool -> Tot va_pbool
let va_req_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) : prop =
(va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret))
let va_ens_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1_stdcall va_b0 va_s0 win dst_b inA_b inB_in /\ va_ensure_total va_b0 va_s0 va_sM
va_fM /\ va_get_ok va_sM /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in
let a0 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))
val va_lemma_Fast_add1_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> dst_b:buffer64 ->
inA_b:buffer64 -> inB_in:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let a0 = Vale.X64.Decls.buffer64_read
inA_b 0 (va_get_mem va_s0) in let a1 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0)
in let a2 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64) | false | true | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Fast_add1_stdcall
(win: bool)
(dst_b inA_b: buffer64)
(inB_in: nat64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.Curve25519.X64.FastUtil.va_wp_Fast_add1_stdcall | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
dst_b: Vale.X64.Memory.buffer64 ->
inA_b: Vale.X64.Memory.buffer64 ->
inB_in: Vale.X64.Memory.nat64 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 50,
"end_line": 280,
"start_col": 2,
"start_line": 235
} |
Prims.Tot | val va_wp_Cswap2_stdcall
(win: bool)
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Cswap2_stdcall (win:bool) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (p0_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64
rRdx va_s0) in let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in <= 1 /\
bit_in = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) /\
(Vale.X64.Decls.buffers_disjoint p0_b p1_b \/ p1_b == p0_b) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) p0_in p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) p1_in p1_b 8 (va_get_mem_layout va_s0)
Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rdx:nat64) (va_x_rsi:nat64) (va_x_rdi:nat64)
(va_x_rsp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags
va_x_efl (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRsi va_x_rsi
(va_upd_reg64 rRdx va_x_rdx (va_upd_mem va_x_mem va_s0)))))))))))) in va_get_ok va_sM /\ (let
(p0_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0)
(fun _ -> va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0 18446744073709551615)) = va_if
win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in let p0_0 =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == va_if (bit_in = 1) (fun _ ->
old_p1_0) (fun _ -> old_p0_0) /\ p0_1 == va_if (bit_in = 1) (fun _ -> old_p1_1) (fun _ ->
old_p0_1) /\ p0_2 == va_if (bit_in = 1) (fun _ -> old_p1_2) (fun _ -> old_p0_2) /\ p0_3 ==
va_if (bit_in = 1) (fun _ -> old_p1_3) (fun _ -> old_p0_3) /\ p0_4 == va_if (bit_in = 1) (fun _
-> old_p1_4) (fun _ -> old_p0_4) /\ p0_5 == va_if (bit_in = 1) (fun _ -> old_p1_5) (fun _ ->
old_p0_5) /\ p0_6 == va_if (bit_in = 1) (fun _ -> old_p1_6) (fun _ -> old_p0_6) /\ p0_7 ==
va_if (bit_in = 1) (fun _ -> old_p1_7) (fun _ -> old_p0_7) /\ p1_0 == va_if (bit_in = 1) (fun _
-> old_p0_0) (fun _ -> old_p1_0) /\ p1_1 == va_if (bit_in = 1) (fun _ -> old_p0_1) (fun _ ->
old_p1_1) /\ p1_2 == va_if (bit_in = 1) (fun _ -> old_p0_2) (fun _ -> old_p1_2) /\ p1_3 ==
va_if (bit_in = 1) (fun _ -> old_p0_3) (fun _ -> old_p1_3) /\ p1_4 == va_if (bit_in = 1) (fun _
-> old_p0_4) (fun _ -> old_p1_4) /\ p1_5 == va_if (bit_in = 1) (fun _ -> old_p0_5) (fun _ ->
old_p1_5) /\ p1_6 == va_if (bit_in = 1) (fun _ -> old_p0_6) (fun _ -> old_p1_6) /\ p1_7 ==
va_if (bit_in = 1) (fun _ -> old_p0_7) (fun _ -> old_p1_7) /\ Vale.X64.Decls.modifies_buffer_2
p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRdi va_sM ==
va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) ==> va_k va_sM (()))) | val va_wp_Cswap2_stdcall
(win: bool)
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Cswap2_stdcall
(win: bool)
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(let p0_in:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0)
in
let p1_in:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0)
in
let old_p0_0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let old_p0_1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in
let old_p0_2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0) in
let old_p0_3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in
let old_p0_4:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in
let old_p0_5:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let old_p0_6:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in
let old_p0_7:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let old_p1_0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let old_p1_1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in
let old_p1_2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0) in
let old_p1_3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in
let old_p1_4:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in
let old_p1_5:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let old_p1_6:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in
let old_p1_7:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in <= 1 /\
bit_in = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) /\
(Vale.X64.Decls.buffers_disjoint p0_b p1_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) p0_in p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) p1_in p1_b 8 (va_get_mem_layout va_s0) Secret) /\
(forall (va_x_mem: vale_heap) (va_x_rdx: nat64) (va_x_rsi: nat64) (va_x_rdi: nat64)
(va_x_rsp: nat64) (va_x_r8: nat64) (va_x_r9: nat64) (va_x_r10: nat64)
(va_x_efl: Vale.X64.Flags.t) (va_x_heap0: vale_heap) (va_x_memLayout: vale_heap_layout)
(va_x_stack: vale_stack) (va_x_stackTaint: memtaint).
let va_sM =
va_upd_stackTaint va_x_stackTaint
(va_upd_stack va_x_stack
(va_upd_mem_layout va_x_memLayout
(va_upd_mem_heaplet 0
va_x_heap0
(va_upd_flags va_x_efl
(va_upd_reg64 rR10
va_x_r10
(va_upd_reg64 rR9
va_x_r9
(va_upd_reg64 rR8
va_x_r8
(va_upd_reg64 rRsp
va_x_rsp
(va_upd_reg64 rRdi
va_x_rdi
(va_upd_reg64 rRsi
va_x_rsi
(va_upd_reg64 rRdx
va_x_rdx
(va_upd_mem va_x_mem va_s0))))))))))))
in
va_get_ok va_sM /\
(let p0_in:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0)
in
let p1_in:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0)
in
let old_p0_0:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0)
in
let old_p0_1:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in
let old_p0_2:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0)
in
let old_p0_3:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0)
in
let old_p0_4:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0)
in
let old_p0_5:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0)
in
let old_p0_6:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in
let old_p0_7:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0)
in
let old_p1_0:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0)
in
let old_p1_1:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in
let old_p1_2:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0)
in
let old_p1_3:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0)
in
let old_p1_4:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0)
in
let old_p1_5:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0)
in
let old_p1_6:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in
let old_p1_7:Vale.Def.Types_s.nat64 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0)
in
let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in
let p0_1 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in
let p0_2 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in
let p0_3 = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in
let p0_4 = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in
let p0_5 = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in
let p0_6 = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in
let p0_7 = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in
let p1_0 = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in
let p1_1 = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in
let p1_2 = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in
let p1_3 = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in
let p1_4 = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in
let p1_5 = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in
let p1_6 = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in
let p1_7 = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in
p0_0 == va_if (bit_in = 1) (fun _ -> old_p1_0) (fun _ -> old_p0_0) /\
p0_1 == va_if (bit_in = 1) (fun _ -> old_p1_1) (fun _ -> old_p0_1) /\
p0_2 == va_if (bit_in = 1) (fun _ -> old_p1_2) (fun _ -> old_p0_2) /\
p0_3 == va_if (bit_in = 1) (fun _ -> old_p1_3) (fun _ -> old_p0_3) /\
p0_4 == va_if (bit_in = 1) (fun _ -> old_p1_4) (fun _ -> old_p0_4) /\
p0_5 == va_if (bit_in = 1) (fun _ -> old_p1_5) (fun _ -> old_p0_5) /\
p0_6 == va_if (bit_in = 1) (fun _ -> old_p1_6) (fun _ -> old_p0_6) /\
p0_7 == va_if (bit_in = 1) (fun _ -> old_p1_7) (fun _ -> old_p0_7) /\
p1_0 == va_if (bit_in = 1) (fun _ -> old_p0_0) (fun _ -> old_p1_0) /\
p1_1 == va_if (bit_in = 1) (fun _ -> old_p0_1) (fun _ -> old_p1_1) /\
p1_2 == va_if (bit_in = 1) (fun _ -> old_p0_2) (fun _ -> old_p1_2) /\
p1_3 == va_if (bit_in = 1) (fun _ -> old_p0_3) (fun _ -> old_p1_3) /\
p1_4 == va_if (bit_in = 1) (fun _ -> old_p0_4) (fun _ -> old_p1_4) /\
p1_5 == va_if (bit_in = 1) (fun _ -> old_p0_5) (fun _ -> old_p1_5) /\
p1_6 == va_if (bit_in = 1) (fun _ -> old_p0_6) (fun _ -> old_p1_6) /\
p1_7 == va_if (bit_in = 1) (fun _ -> old_p0_7) (fun _ -> old_p1_7) /\
Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.X64.Memory.nat64",
"Vale.X64.Memory.buffer64",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Prims.op_LessThanOrEqual",
"Prims.op_Equality",
"Vale.X64.Decls.va_if",
"Vale.Def.Types_s.nat64",
"Vale.X64.Machine_s.rRcx",
"Prims.l_not",
"Vale.X64.Machine_s.rRdi",
"Prims.l_or",
"Vale.X64.Decls.buffers_disjoint",
"Vale.X64.Decls.validDstAddrs64",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.buffer64_read",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRsi",
"Prims.l_Forall",
"Vale.X64.InsBasic.vale_heap",
"Vale.X64.Flags.t",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Vale.X64.InsBasic.vale_stack",
"Vale.X64.Memory.memtaint",
"Prims.l_imp",
"Prims.int",
"Vale.X64.Decls.modifies_buffer_2",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_stackTaint",
"Vale.X64.Decls.va_upd_stack",
"Vale.X64.Decls.va_upd_mem_layout",
"Vale.X64.Decls.va_upd_mem_heaplet",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Decls.va_upd_mem"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))
val va_lemma_Fast_add1 : va_b0:va_code -> va_s0:va_state -> dst_b:buffer64 -> inA_b:buffer64 ->
inB:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))
[@ va_qattr]
let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state
-> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0)
(va_get_mem va_s0) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rdx:nat64) (va_x_r8:nat64)
(va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags
va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))) in
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Fast_add1 : dst_b:buffer64 -> inA_b:buffer64 -> inB:nat64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1 dst_b inA_b inB va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1 ()) ([va_Mod_flags;
va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) : (va_quickCode unit
(va_code_Fast_add1 ())) =
(va_QProc (va_code_Fast_add1 ()) ([va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 0;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx;
va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1 dst_b inA_b inB) (va_wpProof_Fast_add1 dst_b
inA_b inB))
//--
//-- Fast_add1_stdcall
val va_code_Fast_add1_stdcall : win:bool -> Tot va_code
val va_codegen_success_Fast_add1_stdcall : win:bool -> Tot va_pbool
let va_req_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) : prop =
(va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret))
let va_ens_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1_stdcall va_b0 va_s0 win dst_b inA_b inB_in /\ va_ensure_total va_b0 va_s0 va_sM
va_fM /\ va_get_ok va_sM /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in
let a0 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))
val va_lemma_Fast_add1_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> dst_b:buffer64 ->
inA_b:buffer64 -> inB_in:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let a0 = Vale.X64.Decls.buffer64_read
inA_b 0 (va_get_mem va_s0) in let a1 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0)
in let a2 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64)
(va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\
bmi2_enabled) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout
va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rbx:nat64)
(va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rsi:nat64) (va_x_rdi:nat64) (va_x_rbp:nat64)
(va_x_rsp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64)
(va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags
va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13
(va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64
rR8 va_x_r8 (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRdi va_x_rdi
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))) in va_get_ok va_sM /\ (let (dst_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64
rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let a0 =
Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) ==> va_k va_sM (())))
val va_wpProof_Fast_add1_stdcall : win:bool -> dst_b:buffer64 -> inA_b:buffer64 -> inB_in:nat64 ->
va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1_stdcall win dst_b inA_b inB_in va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1_stdcall win)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR11; va_Mod_reg64 rR10;
va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp; va_Mod_reg64 rRbp; va_Mod_reg64 rRdi;
va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax;
va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64) :
(va_quickCode unit (va_code_Fast_add1_stdcall win)) =
(va_QProc (va_code_Fast_add1_stdcall win) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp;
va_Mod_reg64 rRbp; va_Mod_reg64 rRdi; va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1_stdcall win dst_b inA_b
inB_in) (va_wpProof_Fast_add1_stdcall win dst_b inA_b inB_in))
//--
//-- Cswap2
val va_code_Cswap2 : va_dummy:unit -> Tot va_code
val va_codegen_success_Cswap2 : va_dummy:unit -> Tot va_pbool
let va_req_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) :
prop =
(va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in == va_get_reg64 rRdi va_s0 /\
va_get_reg64 rRdi va_s0 <= 1 /\ (Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0) p1_b 8 (va_get_mem_layout va_s0) Secret))
let va_ens_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Cswap2 va_b0 va_s0 bit_in p0_b p1_b /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (va_get_reg64 rRdi va_s0
= 1) then old_p1_0 else old_p0_0) /\ p0_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_1
else old_p0_1) /\ p0_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_3 else old_p0_3) /\ p0_4 == (if
(va_get_reg64 rRdi va_s0 = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (va_get_reg64 rRdi
va_s0 = 1) then old_p1_5 else old_p0_5) /\ p0_6 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p1_6 else old_p0_6) /\ p0_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_7 else
old_p0_7) /\ p1_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_0 else old_p1_0) /\ p1_1 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_2 else old_p1_2) /\ p1_3 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p0_3 else old_p1_3) /\ p1_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_4 else
old_p1_4) /\ p1_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_5 else old_p1_5) /\ p1_6 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_7 else old_p1_7))) /\ va_state_eq va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdi va_sM (va_update_ok
va_sM (va_update_mem va_sM va_s0))))))))))
val va_lemma_Cswap2 : va_b0:va_code -> va_s0:va_state -> bit_in:nat64 -> p0_b:buffer64 ->
p1_b:buffer64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in == va_get_reg64 rRdi va_s0 /\
va_get_reg64 rRdi va_s0 <= 1 /\ (Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0) p1_b 8 (va_get_mem_layout va_s0) Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0)
in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem
va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2
(va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let
(old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0)
in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem
va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2
(va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let
(old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (let p0_0 =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (va_get_reg64 rRdi va_s0
= 1) then old_p1_0 else old_p0_0) /\ p0_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_1
else old_p0_1) /\ p0_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_3 else old_p0_3) /\ p0_4 == (if
(va_get_reg64 rRdi va_s0 = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (va_get_reg64 rRdi
va_s0 = 1) then old_p1_5 else old_p0_5) /\ p0_6 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p1_6 else old_p0_6) /\ p0_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_7 else
old_p0_7) /\ p1_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_0 else old_p1_0) /\ p1_1 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_2 else old_p1_2) /\ p1_3 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p0_3 else old_p1_3) /\ p1_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_4 else
old_p1_4) /\ p1_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_5 else old_p1_5) /\ p1_6 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_7 else old_p1_7))) /\ va_state_eq va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdi va_sM (va_update_ok
va_sM (va_update_mem va_sM va_s0)))))))))))
[@ va_qattr]
let va_wp_Cswap2 (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) (va_s0:va_state) (va_k:(va_state ->
unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
bit_in == va_get_reg64 rRdi va_s0 /\ va_get_reg64 rRdi va_s0 <= 1 /\
(Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdx va_s0) p1_b 8
(va_get_mem_layout va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rdi:nat64)
(va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags va_x_efl (va_upd_reg64 rR10
va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRdi va_x_rdi
(va_upd_mem va_x_mem va_s0))))))) in va_get_ok va_sM /\ (let (old_p0_0:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in let
(old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in
let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0)
in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem
va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 4
(va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in let
(old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in
let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0)
in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem
va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 4
(va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Decls.modifies_buffer_2 p0_b
p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_sM) in let p0_1 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let
p0_2 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == va_if (va_get_reg64 rRdi
va_s0 = 1) (fun _ -> old_p1_0) (fun _ -> old_p0_0) /\ p0_1 == va_if (va_get_reg64 rRdi va_s0 =
1) (fun _ -> old_p1_1) (fun _ -> old_p0_1) /\ p0_2 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun
_ -> old_p1_2) (fun _ -> old_p0_2) /\ p0_3 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_3) (fun _ -> old_p0_3) /\ p0_4 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_4) (fun _ -> old_p0_4) /\ p0_5 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_5) (fun _ -> old_p0_5) /\ p0_6 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_6) (fun _ -> old_p0_6) /\ p0_7 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_7) (fun _ -> old_p0_7) /\ p1_0 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_0) (fun _ -> old_p1_0) /\ p1_1 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_1) (fun _ -> old_p1_1) /\ p1_2 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_2) (fun _ -> old_p1_2) /\ p1_3 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_3) (fun _ -> old_p1_3) /\ p1_4 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_4) (fun _ -> old_p1_4) /\ p1_5 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_5) (fun _ -> old_p1_5) /\ p1_6 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_6) (fun _ -> old_p1_6) /\ p1_7 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_7) (fun _ -> old_p1_7))) ==> va_k va_sM (())))
val va_wpProof_Cswap2 : bit_in:nat64 -> p0_b:buffer64 -> p1_b:buffer64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Cswap2 bit_in p0_b p1_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Cswap2 ()) ([va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8;
va_Mod_reg64 rRdi; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Cswap2 (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) : (va_quickCode unit
(va_code_Cswap2 ())) =
(va_QProc (va_code_Cswap2 ()) ([va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdi; va_Mod_mem])
(va_wp_Cswap2 bit_in p0_b p1_b) (va_wpProof_Cswap2 bit_in p0_b p1_b))
//--
//-- Cswap2_stdcall
val va_code_Cswap2_stdcall : win:bool -> Tot va_code
val va_codegen_success_Cswap2_stdcall : win:bool -> Tot va_pbool
let va_req_Cswap2_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (bit_in:nat64)
(p0_b:buffer64) (p1_b:buffer64) : prop =
(va_require_total va_b0 (va_code_Cswap2_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(p0_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else
va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (old_p0_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in let
(old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0) in
let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0)
in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem
va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5
(va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 7 (va_get_mem va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in let
(old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0) in
let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0)
in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem
va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5
(va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 7 (va_get_mem va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
bit_in <= 1 /\ bit_in = (if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) /\
(Vale.X64.Decls.buffers_disjoint p0_b p1_b \/ p1_b == p0_b) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) p0_in p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) p1_in p1_b 8 (va_get_mem_layout va_s0)
Secret))
let va_ens_Cswap2_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (bit_in:nat64)
(p0_b:buffer64) (p1_b:buffer64) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Cswap2_stdcall va_b0 va_s0 win bit_in p0_b p1_b /\ va_ensure_total va_b0 va_s0 va_sM
va_fM /\ va_get_ok va_sM /\ (let (p0_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0)
in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem
va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2
(va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let
(old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0)
in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem
va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2
(va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let
(old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (bit_in = 1) then
old_p1_0 else old_p0_0) /\ p0_1 == (if (bit_in = 1) then old_p1_1 else old_p0_1) /\ p0_2 == (if
(bit_in = 1) then old_p1_2 else old_p0_2) /\ p0_3 == (if (bit_in = 1) then old_p1_3 else
old_p0_3) /\ p0_4 == (if (bit_in = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (bit_in = 1)
then old_p1_5 else old_p0_5) /\ p0_6 == (if (bit_in = 1) then old_p1_6 else old_p0_6) /\ p0_7
== (if (bit_in = 1) then old_p1_7 else old_p0_7) /\ p1_0 == (if (bit_in = 1) then old_p0_0 else
old_p1_0) /\ p1_1 == (if (bit_in = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (bit_in = 1)
then old_p0_2 else old_p1_2) /\ p1_3 == (if (bit_in = 1) then old_p0_3 else old_p1_3) /\ p1_4
== (if (bit_in = 1) then old_p0_4 else old_p1_4) /\ p1_5 == (if (bit_in = 1) then old_p0_5 else
old_p1_5) /\ p1_6 == (if (bit_in = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (bit_in = 1)
then old_p0_7 else old_p1_7) /\ Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0)
(va_get_mem va_sM) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==>
va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\ va_get_reg64 rRsp va_sM == va_get_reg64
rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM
(va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRsp va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))
val va_lemma_Cswap2_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> bit_in:nat64 ->
p0_b:buffer64 -> p1_b:buffer64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Cswap2_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(p0_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else
va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (old_p0_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in let
(old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0) in
let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0)
in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem
va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5
(va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 7 (va_get_mem va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in let
(old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0) in
let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0)
in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem
va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5
(va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 7 (va_get_mem va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
bit_in <= 1 /\ bit_in = (if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) /\
(Vale.X64.Decls.buffers_disjoint p0_b p1_b \/ p1_b == p0_b) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) p0_in p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) p1_in p1_b 8 (va_get_mem_layout va_s0)
Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (p0_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else
va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (old_p0_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in let
(old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0) in
let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0)
in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem
va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5
(va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 7 (va_get_mem va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in let
(old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0) in
let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0)
in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem
va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5
(va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 7 (va_get_mem va_s0) in let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM)
in let p0_1 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (bit_in = 1) then
old_p1_0 else old_p0_0) /\ p0_1 == (if (bit_in = 1) then old_p1_1 else old_p0_1) /\ p0_2 == (if
(bit_in = 1) then old_p1_2 else old_p0_2) /\ p0_3 == (if (bit_in = 1) then old_p1_3 else
old_p0_3) /\ p0_4 == (if (bit_in = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (bit_in = 1)
then old_p1_5 else old_p0_5) /\ p0_6 == (if (bit_in = 1) then old_p1_6 else old_p0_6) /\ p0_7
== (if (bit_in = 1) then old_p1_7 else old_p0_7) /\ p1_0 == (if (bit_in = 1) then old_p0_0 else
old_p1_0) /\ p1_1 == (if (bit_in = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (bit_in = 1)
then old_p0_2 else old_p1_2) /\ p1_3 == (if (bit_in = 1) then old_p0_3 else old_p1_3) /\ p1_4
== (if (bit_in = 1) then old_p0_4 else old_p1_4) /\ p1_5 == (if (bit_in = 1) then old_p0_5 else
old_p1_5) /\ p1_6 == (if (bit_in = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (bit_in = 1)
then old_p0_7 else old_p1_7) /\ Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0)
(va_get_mem va_sM) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==>
va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\ va_get_reg64 rRsp va_sM == va_get_reg64
rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM
(va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRsp va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))
[@ va_qattr]
let va_wp_Cswap2_stdcall (win:bool) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) (va_s0:va_state) | false | true | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Cswap2_stdcall
(win: bool)
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.Curve25519.X64.FastUtil.va_wp_Cswap2_stdcall | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
bit_in: Vale.X64.Memory.nat64 ->
p0_b: Vale.X64.Memory.buffer64 ->
p1_b: Vale.X64.Memory.buffer64 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 77,
"end_line": 831,
"start_col": 2,
"start_line": 745
} |
Prims.Tot | val va_ens_Cswap2_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_ens_Cswap2_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (bit_in:nat64)
(p0_b:buffer64) (p1_b:buffer64) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Cswap2_stdcall va_b0 va_s0 win bit_in p0_b p1_b /\ va_ensure_total va_b0 va_s0 va_sM
va_fM /\ va_get_ok va_sM /\ (let (p0_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0)
in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem
va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2
(va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let
(old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0)
in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem
va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2
(va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let
(old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (bit_in = 1) then
old_p1_0 else old_p0_0) /\ p0_1 == (if (bit_in = 1) then old_p1_1 else old_p0_1) /\ p0_2 == (if
(bit_in = 1) then old_p1_2 else old_p0_2) /\ p0_3 == (if (bit_in = 1) then old_p1_3 else
old_p0_3) /\ p0_4 == (if (bit_in = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (bit_in = 1)
then old_p1_5 else old_p0_5) /\ p0_6 == (if (bit_in = 1) then old_p1_6 else old_p0_6) /\ p0_7
== (if (bit_in = 1) then old_p1_7 else old_p0_7) /\ p1_0 == (if (bit_in = 1) then old_p0_0 else
old_p1_0) /\ p1_1 == (if (bit_in = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (bit_in = 1)
then old_p0_2 else old_p1_2) /\ p1_3 == (if (bit_in = 1) then old_p0_3 else old_p1_3) /\ p1_4
== (if (bit_in = 1) then old_p0_4 else old_p1_4) /\ p1_5 == (if (bit_in = 1) then old_p0_5 else
old_p1_5) /\ p1_6 == (if (bit_in = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (bit_in = 1)
then old_p0_7 else old_p1_7) /\ Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0)
(va_get_mem va_sM) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==>
va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\ va_get_reg64 rRsp va_sM == va_get_reg64
rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM
(va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRsp va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))) | val va_ens_Cswap2_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop
let va_ens_Cswap2_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop = | false | null | false | (va_req_Cswap2_stdcall va_b0 va_s0 win bit_in p0_b p1_b /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\
(let p0_in:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0)
in
let p1_in:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0)
in
let old_p0_0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let old_p0_1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in
let old_p0_2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0) in
let old_p0_3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in
let old_p0_4:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in
let old_p0_5:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let old_p0_6:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in
let old_p0_7:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let old_p1_0:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let old_p1_1:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in
let old_p1_2:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0) in
let old_p1_3:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in
let old_p1_4:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in
let old_p1_5:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let old_p1_6:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in
let old_p1_7:Vale.Def.Types_s.nat64 = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in
let p0_1 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in
let p0_2 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in
let p0_3 = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in
let p0_4 = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in
let p0_5 = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in
let p0_6 = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in
let p0_7 = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in
let p1_0 = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in
let p1_1 = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in
let p1_2 = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in
let p1_3 = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in
let p1_4 = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in
let p1_5 = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in
let p1_6 = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in
let p1_7 = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in
p0_0 == (if (bit_in = 1) then old_p1_0 else old_p0_0) /\
p0_1 == (if (bit_in = 1) then old_p1_1 else old_p0_1) /\
p0_2 == (if (bit_in = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (bit_in = 1) then old_p1_3 else old_p0_3) /\
p0_4 == (if (bit_in = 1) then old_p1_4 else old_p0_4) /\
p0_5 == (if (bit_in = 1) then old_p1_5 else old_p0_5) /\
p0_6 == (if (bit_in = 1) then old_p1_6 else old_p0_6) /\
p0_7 == (if (bit_in = 1) then old_p1_7 else old_p0_7) /\
p1_0 == (if (bit_in = 1) then old_p0_0 else old_p1_0) /\
p1_1 == (if (bit_in = 1) then old_p0_1 else old_p1_1) /\
p1_2 == (if (bit_in = 1) then old_p0_2 else old_p1_2) /\
p1_3 == (if (bit_in = 1) then old_p0_3 else old_p1_3) /\
p1_4 == (if (bit_in = 1) then old_p0_4 else old_p1_4) /\
p1_5 == (if (bit_in = 1) then old_p0_5 else old_p1_5) /\
p1_6 == (if (bit_in = 1) then old_p0_6 else old_p1_6) /\
p1_7 == (if (bit_in = 1) then old_p0_7 else old_p1_7) /\
Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
va_state_eq va_sM
(va_update_stackTaint va_sM
(va_update_stack va_sM
(va_update_mem_layout va_sM
(va_update_mem_heaplet 0
va_sM
(va_update_flags va_sM
(va_update_reg64 rR10
va_sM
(va_update_reg64 rR9
va_sM
(va_update_reg64 rR8
va_sM
(va_update_reg64 rRsp
va_sM
(va_update_reg64 rRdi
va_sM
(va_update_reg64 rRsi
va_sM
(va_update_reg64 rRdx
va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))
))))))))))))) | {
"checked_file": "Vale.Curve25519.X64.FastUtil.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.X64.FastUtil.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Prims.bool",
"Vale.X64.Memory.nat64",
"Vale.X64.Memory.buffer64",
"Vale.X64.Decls.va_fuel",
"Prims.l_and",
"Vale.Curve25519.X64.FastUtil.va_req_Cswap2_stdcall",
"Vale.X64.Decls.va_ensure_total",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Prims.op_Equality",
"Prims.int",
"Vale.X64.Decls.modifies_buffer_2",
"Vale.X64.Decls.va_get_mem",
"Prims.l_imp",
"Vale.Def.Types_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Decls.buffer64_read",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Decls.va_state_eq",
"Vale.X64.Decls.va_update_stackTaint",
"Vale.X64.Decls.va_update_stack",
"Vale.X64.Decls.va_update_mem_layout",
"Vale.X64.Decls.va_update_mem_heaplet",
"Vale.X64.Decls.va_update_flags",
"Vale.X64.Decls.va_update_reg64",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Decls.va_update_ok",
"Vale.X64.Decls.va_update_mem",
"Prims.prop"
] | [] | module Vale.Curve25519.X64.FastUtil
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsStack
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.Curve25519.Fast_defs
open Vale.X64.CPU_Features_s
//-- Fast_add1
val va_code_Fast_add1 : va_dummy:unit -> Tot va_code
val va_codegen_success_Fast_add1 : va_dummy:unit -> Tot va_pbool
let va_req_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
: prop =
(va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0))
let va_ens_Fast_add1 (va_b0:va_code) (va_s0:va_state) (dst_b:buffer64) (inA_b:buffer64) (inB:nat64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1 va_b0 va_s0 dst_b inA_b inB /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))
val va_lemma_Fast_add1 : va_b0:va_code -> va_s0:va_state -> dst_b:buffer64 -> inA_b:buffer64 ->
inB:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let
(a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let
(a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let
(a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let
(a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in adx_enabled /\ bmi2_enabled
/\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in
let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in
let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in
let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in
let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))
[@ va_qattr]
let va_wp_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) (va_s0:va_state) (va_k:(va_state
-> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
adx_enabled /\ bmi2_enabled /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0)
(va_get_mem va_s0) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdi va_s0) dst_b 4
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRsi va_s0) inA_b 4 (va_get_mem_layout va_s0) Secret /\ inB == va_get_reg64 rRdx
va_s0) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rdx:nat64) (va_x_r8:nat64)
(va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64) (va_x_heap0:vale_heap)
(va_x_memLayout:vale_heap_layout) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags
va_x_efl (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8
(va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))) in
va_get_ok va_sM /\ (let (a0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 0
(va_get_mem va_s0) in let (a1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 1
(va_get_mem va_s0) in let (a2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 2
(va_get_mem va_s0) in let (a3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read inA_b 3
(va_get_mem va_s0) in let (a:Prims.nat) = Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in
let d0 = Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let d =
Vale.Curve25519.Fast_defs.pow2_five d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a +
va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.modifies_buffer dst_b (va_get_mem va_s0) (va_get_mem
va_sM)) ==> va_k va_sM (())))
val va_wpProof_Fast_add1 : dst_b:buffer64 -> inA_b:buffer64 -> inB:nat64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1 dst_b inA_b inB va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1 ()) ([va_Mod_flags;
va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1 (dst_b:buffer64) (inA_b:buffer64) (inB:nat64) : (va_quickCode unit
(va_code_Fast_add1 ())) =
(va_QProc (va_code_Fast_add1 ()) ([va_Mod_flags; va_Mod_mem_layout; va_Mod_mem_heaplet 0;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdx;
va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1 dst_b inA_b inB) (va_wpProof_Fast_add1 dst_b
inA_b inB))
//--
//-- Fast_add1_stdcall
val va_code_Fast_add1_stdcall : win:bool -> Tot va_code
val va_codegen_success_Fast_add1_stdcall : win:bool -> Tot va_pbool
let va_req_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) : prop =
(va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret))
let va_ens_Fast_add1_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (dst_b:buffer64)
(inA_b:buffer64) (inB_in:nat64) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Fast_add1_stdcall va_b0 va_s0 win dst_b inA_b inB_in /\ va_ensure_total va_b0 va_s0 va_sM
va_fM /\ va_get_ok va_sM /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in
let a0 = Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))
val va_lemma_Fast_add1_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> dst_b:buffer64 ->
inA_b:buffer64 -> inB_in:nat64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Fast_add1_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\ bmi2_enabled) /\
(Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout va_s0)
Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (dst_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let a0 = Vale.X64.Decls.buffer64_read
inA_b 0 (va_get_mem va_s0) in let a1 = Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0)
in let a2 = Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) /\ va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack
va_sM (va_update_mem_layout va_sM (va_update_mem_heaplet 0 va_sM (va_update_flags va_sM
(va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM
(va_update_reg64 rR8 va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdx va_sM
(va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64)
(va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (dst_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (inA_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (adx_enabled /\
bmi2_enabled) /\ (Vale.X64.Decls.buffers_disjoint dst_b inA_b \/ inA_b == dst_b) /\ inB_in =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) dst_in dst_b 4 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validSrcAddrs64 (va_get_mem va_s0) inA_in inA_b 4 (va_get_mem_layout
va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64) (va_x_rbx:nat64)
(va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rsi:nat64) (va_x_rdi:nat64) (va_x_rbp:nat64)
(va_x_rsp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_r11:nat64)
(va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_mem_layout va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags
va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13
(va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64
rR8 va_x_r8 (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRdi va_x_rdi
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))) in va_get_ok va_sM /\ (let (dst_in:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64
rRdi va_s0) in let (inA_in:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let a0 =
Vale.X64.Decls.buffer64_read inA_b 0 (va_get_mem va_s0) in let a1 =
Vale.X64.Decls.buffer64_read inA_b 1 (va_get_mem va_s0) in let a2 =
Vale.X64.Decls.buffer64_read inA_b 2 (va_get_mem va_s0) in let a3 =
Vale.X64.Decls.buffer64_read inA_b 3 (va_get_mem va_s0) in let d0 =
Vale.X64.Decls.buffer64_read dst_b 0 (va_get_mem va_sM) in let d1 =
Vale.X64.Decls.buffer64_read dst_b 1 (va_get_mem va_sM) in let d2 =
Vale.X64.Decls.buffer64_read dst_b 2 (va_get_mem va_sM) in let d3 =
Vale.X64.Decls.buffer64_read dst_b 3 (va_get_mem va_sM) in let a =
Vale.Curve25519.Fast_defs.pow2_four a0 a1 a2 a3 in let d = Vale.Curve25519.Fast_defs.pow2_five
d0 d1 d2 d3 (va_get_reg64 rRax va_sM) in d == a + inB_in /\ Vale.X64.Decls.modifies_buffer
dst_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64
rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==>
va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM ==
va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14
va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15
va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==>
va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM ==
va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0) ==> va_k va_sM (())))
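// Editor's note: the universally quantified va_x_* variables in the definition above range
// over exactly the state components listed in this procedure's modifies frame (registers,
// flags, heaplet 0, the memory layout, the stack and the stack taint). The weakest
// precondition thus says: the requires clause holds, and for every final state va_sM that
// updates only those components and satisfies the ensures clause, the continuation va_k
// accepts va_sM. This is the usual shape Vale emits for its va_wp_* predicates.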
val va_wpProof_Fast_add1_stdcall : win:bool -> dst_b:buffer64 -> inA_b:buffer64 -> inB_in:nat64 ->
va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Fast_add1_stdcall win dst_b inA_b inB_in va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Fast_add1_stdcall win)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR11; va_Mod_reg64 rR10;
va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp; va_Mod_reg64 rRbp; va_Mod_reg64 rRdi;
va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax;
va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Fast_add1_stdcall (win:bool) (dst_b:buffer64) (inA_b:buffer64) (inB_in:nat64) :
(va_quickCode unit (va_code_Fast_add1_stdcall win)) =
(va_QProc (va_code_Fast_add1_stdcall win) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRsp;
va_Mod_reg64 rRbp; va_Mod_reg64 rRdi; va_Mod_reg64 rRsi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Fast_add1_stdcall win dst_b inA_b
inB_in) (va_wpProof_Fast_add1_stdcall win dst_b inA_b inB_in))
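// Editor's note: va_QProc packages four pieces of data -- the procedure's code, its
// modifies list (the frame enumerated above), its weakest precondition va_wp_*, and the
// proof va_wpProof_* relating the two -- into a va_quickCode value that Vale's quick-code
// framework can compose when verifying callers of this procedure.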
//--
//-- Cswap2
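// Editor's summary (not part of the generated interface): Cswap2 conditionally swaps the
// two 8-word buffers p0_b and p1_b under control of the bit passed in rdi, which the
// precondition restricts to 0 or 1. The postconditions below state that when the bit is 1
// every pair p0[i]/p1[i] is exchanged, and when it is 0 both buffers are left unchanged.
// Illustrative per-word sketch, in hypothetical F* (names invented for exposition only):
//   let cswap_word (bit:nat{bit <= 1}) (x y:nat64) : nat64 & nat64 =
//     if bit = 1 then (y, x) else (x, y)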
val va_code_Cswap2 : va_dummy:unit -> Tot va_code
val va_codegen_success_Cswap2 : va_dummy:unit -> Tot va_pbool
let va_req_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) :
prop =
(va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in == va_get_reg64 rRdi va_s0 /\
va_get_reg64 rRdi va_s0 <= 1 /\ (Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0) p1_b 8 (va_get_mem_layout va_s0) Secret))
let va_ens_Cswap2 (va_b0:va_code) (va_s0:va_state) (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64)
(va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Cswap2 va_b0 va_s0 bit_in p0_b p1_b /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\
(let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (va_get_reg64 rRdi va_s0
= 1) then old_p1_0 else old_p0_0) /\ p0_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_1
else old_p0_1) /\ p0_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_3 else old_p0_3) /\ p0_4 == (if
(va_get_reg64 rRdi va_s0 = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (va_get_reg64 rRdi
va_s0 = 1) then old_p1_5 else old_p0_5) /\ p0_6 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p1_6 else old_p0_6) /\ p0_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_7 else
old_p0_7) /\ p1_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_0 else old_p1_0) /\ p1_1 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_2 else old_p1_2) /\ p1_3 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p0_3 else old_p1_3) /\ p1_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_4 else
old_p1_4) /\ p1_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_5 else old_p1_5) /\ p1_6 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_7 else old_p1_7))) /\ va_state_eq va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdi va_sM (va_update_ok
va_sM (va_update_mem va_sM va_s0))))))))))
val va_lemma_Cswap2 : va_b0:va_code -> va_s0:va_state -> bit_in:nat64 -> p0_b:buffer64 ->
p1_b:buffer64
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Cswap2 ()) va_s0 /\ va_get_ok va_s0 /\ (let
(old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in
let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0)
in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem
va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3
(va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let
(old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in
let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0)
in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem
va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3
(va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ bit_in == va_get_reg64 rRdi va_s0 /\
va_get_reg64 rRdi va_s0 <= 1 /\ (Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0)
(va_get_reg64 rRdx va_s0) p1_b 8 (va_get_mem_layout va_s0) Secret)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0)
in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem
va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2
(va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 4 (va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0) in let
(old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in
let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0)
in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem
va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2
(va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 4 (va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0) in let
(old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in
Vale.X64.Decls.modifies_buffer_2 p0_b p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (let p0_0 =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_sM) in let p0_1 =
Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let p0_2 =
Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == (if (va_get_reg64 rRdi va_s0
= 1) then old_p1_0 else old_p0_0) /\ p0_1 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_1
else old_p0_1) /\ p0_2 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_2 else old_p0_2) /\
p0_3 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_3 else old_p0_3) /\ p0_4 == (if
(va_get_reg64 rRdi va_s0 = 1) then old_p1_4 else old_p0_4) /\ p0_5 == (if (va_get_reg64 rRdi
va_s0 = 1) then old_p1_5 else old_p0_5) /\ p0_6 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p1_6 else old_p0_6) /\ p0_7 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p1_7 else
old_p0_7) /\ p1_0 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_0 else old_p1_0) /\ p1_1 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_1 else old_p1_1) /\ p1_2 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_2 else old_p1_2) /\ p1_3 == (if (va_get_reg64 rRdi va_s0 = 1) then
old_p0_3 else old_p1_3) /\ p1_4 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_4 else
old_p1_4) /\ p1_5 == (if (va_get_reg64 rRdi va_s0 = 1) then old_p0_5 else old_p1_5) /\ p1_6 ==
(if (va_get_reg64 rRdi va_s0 = 1) then old_p0_6 else old_p1_6) /\ p1_7 == (if (va_get_reg64
rRdi va_s0 = 1) then old_p0_7 else old_p1_7))) /\ va_state_eq va_sM (va_update_mem_layout va_sM
(va_update_mem_heaplet 0 va_sM (va_update_flags va_sM (va_update_reg64 rR10 va_sM
(va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRdi va_sM (va_update_ok
va_sM (va_update_mem va_sM va_s0)))))))))))
[@ va_qattr]
let va_wp_Cswap2 (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) (va_s0:va_state) (va_k:(va_state ->
unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (old_p0_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
1 (va_get_mem va_s0) in let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 2 (va_get_mem va_s0) in let (old_p0_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_s0) in let
(old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_s0) in
let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_s0)
in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem
va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 0
(va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
1 (va_get_mem va_s0) in let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 2 (va_get_mem va_s0) in let (old_p1_3:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_s0) in let
(old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_s0) in
let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_s0)
in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem
va_s0) in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
bit_in == va_get_reg64 rRdi va_s0 /\ va_get_reg64 rRdi va_s0 <= 1 /\
(Vale.X64.Decls.buffers_disjoint p1_b p0_b \/ p1_b == p0_b) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) (va_get_reg64 rRsi va_s0) p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) (va_get_reg64 rRdx va_s0) p1_b 8
(va_get_mem_layout va_s0) Secret) /\ (forall (va_x_mem:vale_heap) (va_x_rdi:nat64)
(va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_heap0:vale_heap) (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout (va_upd_mem_heaplet 0 va_x_heap0 (va_upd_flags va_x_efl (va_upd_reg64 rR10
va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRdi va_x_rdi
(va_upd_mem va_x_mem va_s0))))))) in va_get_ok va_sM /\ (let (old_p0_0:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in let
(old_p0_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in
let (old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0)
in let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem
va_s0) in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 4
(va_get_mem va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
5 (va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in let
(old_p1_1:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in
let (old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0)
in let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem
va_s0) in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 4
(va_get_mem va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
5 (va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_s0) in Vale.X64.Decls.modifies_buffer_2 p0_b
p1_b (va_get_mem va_s0) (va_get_mem va_sM) /\ (let p0_0 = Vale.X64.Decls.buffer64_read p0_b 0
(va_get_mem va_sM) in let p0_1 = Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_sM) in let
p0_2 = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_sM) in let p0_3 =
Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_sM) in let p0_4 =
Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem va_sM) in let p0_5 =
Vale.X64.Decls.buffer64_read p0_b 5 (va_get_mem va_sM) in let p0_6 =
Vale.X64.Decls.buffer64_read p0_b 6 (va_get_mem va_sM) in let p0_7 =
Vale.X64.Decls.buffer64_read p0_b 7 (va_get_mem va_sM) in let p1_0 =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_sM) in let p1_1 =
Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_sM) in let p1_2 =
Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_sM) in let p1_3 =
Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_sM) in let p1_4 =
Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem va_sM) in let p1_5 =
Vale.X64.Decls.buffer64_read p1_b 5 (va_get_mem va_sM) in let p1_6 =
Vale.X64.Decls.buffer64_read p1_b 6 (va_get_mem va_sM) in let p1_7 =
Vale.X64.Decls.buffer64_read p1_b 7 (va_get_mem va_sM) in p0_0 == va_if (va_get_reg64 rRdi
va_s0 = 1) (fun _ -> old_p1_0) (fun _ -> old_p0_0) /\ p0_1 == va_if (va_get_reg64 rRdi va_s0 =
1) (fun _ -> old_p1_1) (fun _ -> old_p0_1) /\ p0_2 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun
_ -> old_p1_2) (fun _ -> old_p0_2) /\ p0_3 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_3) (fun _ -> old_p0_3) /\ p0_4 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_4) (fun _ -> old_p0_4) /\ p0_5 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_5) (fun _ -> old_p0_5) /\ p0_6 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_6) (fun _ -> old_p0_6) /\ p0_7 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p1_7) (fun _ -> old_p0_7) /\ p1_0 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_0) (fun _ -> old_p1_0) /\ p1_1 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_1) (fun _ -> old_p1_1) /\ p1_2 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_2) (fun _ -> old_p1_2) /\ p1_3 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_3) (fun _ -> old_p1_3) /\ p1_4 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_4) (fun _ -> old_p1_4) /\ p1_5 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_5) (fun _ -> old_p1_5) /\ p1_6 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_6) (fun _ -> old_p1_6) /\ p1_7 == va_if (va_get_reg64 rRdi va_s0 = 1) (fun _ ->
old_p0_7) (fun _ -> old_p1_7))) ==> va_k va_sM (())))
val va_wpProof_Cswap2 : bit_in:nat64 -> p0_b:buffer64 -> p1_b:buffer64 -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Cswap2 bit_in p0_b p1_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Cswap2 ()) ([va_Mod_mem_layout;
va_Mod_mem_heaplet 0; va_Mod_flags; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8;
va_Mod_reg64 rRdi; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Cswap2 (bit_in:nat64) (p0_b:buffer64) (p1_b:buffer64) : (va_quickCode unit
(va_code_Cswap2 ())) =
(va_QProc (va_code_Cswap2 ()) ([va_Mod_mem_layout; va_Mod_mem_heaplet 0; va_Mod_flags;
va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRdi; va_Mod_mem])
(va_wp_Cswap2 bit_in p0_b p1_b) (va_wpProof_Cswap2 bit_in p0_b p1_b))
//--
//-- Cswap2_stdcall
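// Editor's summary: Cswap2_stdcall is the calling-convention wrapper around Cswap2. Per the
// requires clause below, the swap bit and the two buffer pointers arrive in rcx/rdx/r8 when
// win is true (i.e. the Windows x64 convention) and in rdi/rsi/rdx otherwise (System V),
// the bit must be at most 1, and rsp must hold the initial stack pointer. The wrapper is
// expected to establish the same element-wise swap guarantee as Cswap2; its ensures clause
// is truncated in this extract.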
val va_code_Cswap2_stdcall : win:bool -> Tot va_code
val va_codegen_success_Cswap2_stdcall : win:bool -> Tot va_pbool
let va_req_Cswap2_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (bit_in:nat64)
(p0_b:buffer64) (p1_b:buffer64) : prop =
(va_require_total va_b0 (va_code_Cswap2_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(p0_in:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0 else
va_get_reg64 rRsi va_s0) in let (p1_in:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (old_p0_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p0_b 0 (va_get_mem va_s0) in let (old_p0_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p0_b 1 (va_get_mem va_s0) in let
(old_p0_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 2 (va_get_mem va_s0) in
let (old_p0_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 3 (va_get_mem va_s0)
in let (old_p0_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 4 (va_get_mem
va_s0) in let (old_p0_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b 5
(va_get_mem va_s0) in let (old_p0_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p0_b
6 (va_get_mem va_s0) in let (old_p0_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p0_b 7 (va_get_mem va_s0) in let (old_p1_0:Vale.Def.Types_s.nat64) =
Vale.X64.Decls.buffer64_read p1_b 0 (va_get_mem va_s0) in let (old_p1_1:Vale.Def.Types_s.nat64)
= Vale.X64.Decls.buffer64_read p1_b 1 (va_get_mem va_s0) in let
(old_p1_2:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 2 (va_get_mem va_s0) in
let (old_p1_3:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 3 (va_get_mem va_s0)
in let (old_p1_4:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 4 (va_get_mem
va_s0) in let (old_p1_5:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b 5
(va_get_mem va_s0) in let (old_p1_6:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read p1_b
6 (va_get_mem va_s0) in let (old_p1_7:Vale.Def.Types_s.nat64) = Vale.X64.Decls.buffer64_read
p1_b 7 (va_get_mem va_s0) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack
va_s0) /\ Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
bit_in <= 1 /\ bit_in = (if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) /\
(Vale.X64.Decls.buffers_disjoint p0_b p1_b \/ p1_b == p0_b) /\ Vale.X64.Decls.validDstAddrs64
(va_get_mem va_s0) p0_in p0_b 8 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs64 (va_get_mem va_s0) p1_in p1_b 8 (va_get_mem_layout va_s0)
Secret))
let va_ens_Cswap2_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (bit_in:nat64) | false | true | Vale.Curve25519.X64.FastUtil.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_ens_Cswap2_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(bit_in: nat64)
(p0_b p1_b: buffer64)
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [] | Vale.Curve25519.X64.FastUtil.va_ens_Cswap2_stdcall | {
"file_name": "obj/Vale.Curve25519.X64.FastUtil.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
win: Prims.bool ->
bit_in: Vale.X64.Memory.nat64 ->
p0_b: Vale.X64.Memory.buffer64 ->
p1_b: Vale.X64.Memory.buffer64 ->
va_sM: Vale.X64.Decls.va_state ->
va_fM: Vale.X64.Decls.va_fuel
-> Prims.prop | {
"end_col": 93,
"end_line": 655,
"start_col": 2,
"start_line": 601
} |
Prims.Tot | val is_gctr_plain_LE (p: seq nat8) : prop0 | [
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let is_gctr_plain_LE (p:seq nat8) : prop0 = length p < pow2_32 | val is_gctr_plain_LE (p: seq nat8) : prop0
let is_gctr_plain_LE (p: seq nat8) : prop0 = | false | null | false | length p < pow2_32 | {
"checked_file": "Vale.AES.GCTR_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCTR_s.fst"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat8",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Seq.Base.length",
"Vale.Def.Words_s.pow2_32",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.AES.GCTR_s
// IMPORTANT: Following NIST's specification, this spec is written assuming a big-endian mapping from bytes to quad32s
// Since the AES spec (AES_s) is in little-endian, we need to byteswap each time we call AES
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open FStar.Mul
open Vale.AES.AES_s
open FStar.Seq
// The max length of pow2_32 corresponds to the max length of buffers in Low* | false | true | Vale.AES.GCTR_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val is_gctr_plain_LE (p: seq nat8) : prop0 | [] | Vale.AES.GCTR_s.is_gctr_plain_LE | {
"file_name": "vale/specs/crypto/Vale.AES.GCTR_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 -> Vale.Def.Prop_s.prop0 | {
"end_col": 62,
"end_line": 16,
"start_col": 44,
"start_line": 16
} |
FStar.Pervasives.Lemma | [
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gctr_encrypt_LE_reveal = opaque_revealer (`%gctr_encrypt_LE) gctr_encrypt_LE gctr_encrypt_LE_def | let gctr_encrypt_LE_reveal = | false | null | true | opaque_revealer (`%gctr_encrypt_LE) gctr_encrypt_LE gctr_encrypt_LE_def | {
"checked_file": "Vale.AES.GCTR_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCTR_s.fst"
} | [
"lemma"
] | [
"Vale.Def.Opaque_s.opaque_revealer",
"Vale.Def.Types_s.quad32",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat8",
"Vale.AES.AES_common_s.algorithm",
"Vale.Def.Types_s.nat32",
"Prims.l_and",
"Vale.AES.GCTR_s.is_gctr_plain_LE",
"Vale.AES.AES_s.is_aes_key_LE",
"Prims.l_True",
"Vale.AES.GCTR_s.gctr_encrypt_LE",
"Vale.AES.GCTR_s.gctr_encrypt_LE_def"
] | [] | module Vale.AES.GCTR_s
// IMPORTANT: Following NIST's specification, this spec is written assuming a big-endian mapping from bytes to quad32s
// Since the AES spec (AES_s) is in little-endian, we need to byteswap each time we call AES
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open FStar.Mul
open Vale.AES.AES_s
open FStar.Seq
// The max length of pow2_32 corresponds to the max length of buffers in Low*
// length plain < pow2_32 <= spec max of 2**39 - 256;
let is_gctr_plain_LE (p:seq nat8) : prop0 = length p < pow2_32
type gctr_plain_LE:eqtype = p:seq nat8 { is_gctr_plain_LE p }
type gctr_plain_internal_LE:eqtype = seq quad32
let inc32 (cb:quad32) (i:int) : quad32 =
Mkfour ((cb.lo0 + i) % pow2_32) cb.lo1 cb.hi2 cb.hi3
let gctr_encrypt_block (icb_BE:quad32) (plain_LE:quad32) (alg:algorithm) (key:seq nat32) (i:int) : Pure quad32
(requires is_aes_key_LE alg key)
(ensures fun _ -> True)
=
let icb_LE = reverse_bytes_quad32 (inc32 icb_BE i) in
quad32_xor plain_LE (aes_encrypt_LE alg key icb_LE)
let rec gctr_encrypt_recursive (icb_BE:quad32) (plain:gctr_plain_internal_LE)
(alg:algorithm) (key:aes_key_LE alg) (i:int) : Tot (seq quad32) (decreases %[length plain]) =
if length plain = 0 then empty
else
cons (gctr_encrypt_block icb_BE (head plain) alg key i) (gctr_encrypt_recursive icb_BE (tail plain) alg key (i + 1))
let pad_to_128_bits (p:seq nat8) : Pure (seq nat8)
(requires True)
(ensures fun q -> length q % 16 == 0 /\ length q <= length p + 15)
=
let num_extra_bytes = length p % 16 in
if num_extra_bytes = 0 then p
else p @| (create (16 - num_extra_bytes) 0)
// little-endian, except for icb_BE
let gctr_encrypt_LE_def (icb_BE:quad32) (plain:seq nat8) (alg:algorithm) (key:seq nat32) : Pure (seq nat8)
(requires is_gctr_plain_LE plain /\ is_aes_key_LE alg key)
(ensures fun _ -> True)
=
let num_extra = (length plain) % 16 in
if num_extra = 0 then
let plain_quads_LE = le_bytes_to_seq_quad32 plain in
let cipher_quads_LE = gctr_encrypt_recursive icb_BE plain_quads_LE alg key 0 in
le_seq_quad32_to_bytes cipher_quads_LE
else
let full_bytes_len = (length plain) - num_extra in
let full_blocks, final_block = split plain full_bytes_len in
let full_quads_LE = le_bytes_to_seq_quad32 full_blocks in
let final_quad_LE = le_bytes_to_quad32 (pad_to_128_bits final_block) in
let cipher_quads_LE = gctr_encrypt_recursive icb_BE full_quads_LE alg key 0 in
let final_cipher_quad_LE = gctr_encrypt_block icb_BE final_quad_LE alg key (full_bytes_len / 16) in
let cipher_bytes_full_LE = le_seq_quad32_to_bytes cipher_quads_LE in
let final_cipher_bytes_LE = slice (le_quad32_to_bytes final_cipher_quad_LE) 0 num_extra in
cipher_bytes_full_LE @| final_cipher_bytes_LE | false | false | Vale.AES.GCTR_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gctr_encrypt_LE_reveal : _: Prims.unit
-> FStar.Pervasives.Lemma
(ensures Vale.AES.GCTR_s.gctr_encrypt_LE == Vale.AES.GCTR_s.gctr_encrypt_LE_def) | [] | Vale.AES.GCTR_s.gctr_encrypt_LE_reveal | {
"file_name": "vale/specs/crypto/Vale.AES.GCTR_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | _: Prims.unit
-> FStar.Pervasives.Lemma
(ensures Vale.AES.GCTR_s.gctr_encrypt_LE == Vale.AES.GCTR_s.gctr_encrypt_LE_def) | {
"end_col": 112,
"end_line": 71,
"start_col": 41,
"start_line": 71
} |
|
Prims.Pure | [
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gctr_encrypt_LE = opaque_make gctr_encrypt_LE_def | let gctr_encrypt_LE = | false | null | false | opaque_make gctr_encrypt_LE_def | {
"checked_file": "Vale.AES.GCTR_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCTR_s.fst"
} | [] | [
"Vale.Def.Opaque_s.opaque_make",
"Vale.Def.Types_s.quad32",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat8",
"Vale.AES.AES_common_s.algorithm",
"Vale.Def.Types_s.nat32",
"Prims.l_and",
"Vale.AES.GCTR_s.is_gctr_plain_LE",
"Vale.AES.AES_s.is_aes_key_LE",
"Prims.l_True",
"Vale.AES.GCTR_s.gctr_encrypt_LE_def"
] | [] | module Vale.AES.GCTR_s
// IMPORTANT: Following NIST's specification, this spec is written assuming a big-endian mapping from bytes to quad32s
// Since the AES spec (AES_s) is in little-endian, we need to byteswap each time we call AES
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open FStar.Mul
open Vale.AES.AES_s
open FStar.Seq
// The max length of pow2_32 corresponds to the max length of buffers in Low*
// length plain < pow2_32 <= spec max of 2**39 - 256;
let is_gctr_plain_LE (p:seq nat8) : prop0 = length p < pow2_32
type gctr_plain_LE:eqtype = p:seq nat8 { is_gctr_plain_LE p }
type gctr_plain_internal_LE:eqtype = seq quad32
let inc32 (cb:quad32) (i:int) : quad32 =
Mkfour ((cb.lo0 + i) % pow2_32) cb.lo1 cb.hi2 cb.hi3
let gctr_encrypt_block (icb_BE:quad32) (plain_LE:quad32) (alg:algorithm) (key:seq nat32) (i:int) : Pure quad32
(requires is_aes_key_LE alg key)
(ensures fun _ -> True)
=
let icb_LE = reverse_bytes_quad32 (inc32 icb_BE i) in
quad32_xor plain_LE (aes_encrypt_LE alg key icb_LE)
let rec gctr_encrypt_recursive (icb_BE:quad32) (plain:gctr_plain_internal_LE)
(alg:algorithm) (key:aes_key_LE alg) (i:int) : Tot (seq quad32) (decreases %[length plain]) =
if length plain = 0 then empty
else
cons (gctr_encrypt_block icb_BE (head plain) alg key i) (gctr_encrypt_recursive icb_BE (tail plain) alg key (i + 1))
let pad_to_128_bits (p:seq nat8) : Pure (seq nat8)
(requires True)
(ensures fun q -> length q % 16 == 0 /\ length q <= length p + 15)
=
let num_extra_bytes = length p % 16 in
if num_extra_bytes = 0 then p
else p @| (create (16 - num_extra_bytes) 0)
// little-endian, except for icb_BE
let gctr_encrypt_LE_def (icb_BE:quad32) (plain:seq nat8) (alg:algorithm) (key:seq nat32) : Pure (seq nat8)
(requires is_gctr_plain_LE plain /\ is_aes_key_LE alg key)
(ensures fun _ -> True)
=
let num_extra = (length plain) % 16 in
if num_extra = 0 then
let plain_quads_LE = le_bytes_to_seq_quad32 plain in
let cipher_quads_LE = gctr_encrypt_recursive icb_BE plain_quads_LE alg key 0 in
le_seq_quad32_to_bytes cipher_quads_LE
else
let full_bytes_len = (length plain) - num_extra in
let full_blocks, final_block = split plain full_bytes_len in
let full_quads_LE = le_bytes_to_seq_quad32 full_blocks in
let final_quad_LE = le_bytes_to_quad32 (pad_to_128_bits final_block) in
let cipher_quads_LE = gctr_encrypt_recursive icb_BE full_quads_LE alg key 0 in
let final_cipher_quad_LE = gctr_encrypt_block icb_BE final_quad_LE alg key (full_bytes_len / 16) in
let cipher_bytes_full_LE = le_seq_quad32_to_bytes cipher_quads_LE in
let final_cipher_bytes_LE = slice (le_quad32_to_bytes final_cipher_quad_LE) 0 num_extra in | false | false | Vale.AES.GCTR_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gctr_encrypt_LE : icb_BE: Vale.Def.Types_s.quad32 ->
plain: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.Def.Types_s.nat32
-> Prims.Pure (FStar.Seq.Base.seq Vale.Def.Types_s.nat8) | [] | Vale.AES.GCTR_s.gctr_encrypt_LE | {
"file_name": "vale/specs/crypto/Vale.AES.GCTR_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
icb_BE: Vale.Def.Types_s.quad32 ->
plain: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.Def.Types_s.nat32
-> Prims.Pure (FStar.Seq.Base.seq Vale.Def.Types_s.nat8) | {
"end_col": 73,
"end_line": 70,
"start_col": 42,
"start_line": 70
} |
|
Prims.Tot | val inc32 (cb: quad32) (i: int) : quad32 | [
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let inc32 (cb:quad32) (i:int) : quad32 =
Mkfour ((cb.lo0 + i) % pow2_32) cb.lo1 cb.hi2 cb.hi3 | val inc32 (cb: quad32) (i: int) : quad32
let inc32 (cb: quad32) (i: int) : quad32 = | false | null | false | Mkfour ((cb.lo0 + i) % pow2_32) cb.lo1 cb.hi2 cb.hi3 | {
"checked_file": "Vale.AES.GCTR_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCTR_s.fst"
} | [
"total"
] | [
"Vale.Def.Types_s.quad32",
"Prims.int",
"Vale.Def.Words_s.Mkfour",
"Vale.Def.Types_s.nat32",
"Prims.op_Modulus",
"Prims.op_Addition",
"Vale.Def.Words_s.__proj__Mkfour__item__lo0",
"Vale.Def.Words_s.pow2_32",
"Vale.Def.Words_s.__proj__Mkfour__item__lo1",
"Vale.Def.Words_s.__proj__Mkfour__item__hi2",
"Vale.Def.Words_s.__proj__Mkfour__item__hi3"
] | [] | module Vale.AES.GCTR_s
// IMPORTANT: Following NIST's specification, this spec is written assuming a big-endian mapping from bytes to quad32s
// Since the AES spec (AES_s) is in little-endian, we need to byteswap each time we call AES
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open FStar.Mul
open Vale.AES.AES_s
open FStar.Seq
// The max length of pow2_32 corresponds to the max length of buffers in Low*
// length plain < pow2_32 <= spec max of 2**39 - 256;
let is_gctr_plain_LE (p:seq nat8) : prop0 = length p < pow2_32
type gctr_plain_LE:eqtype = p:seq nat8 { is_gctr_plain_LE p }
type gctr_plain_internal_LE:eqtype = seq quad32 | false | true | Vale.AES.GCTR_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val inc32 (cb: quad32) (i: int) : quad32 | [] | Vale.AES.GCTR_s.inc32 | {
"file_name": "vale/specs/crypto/Vale.AES.GCTR_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | cb: Vale.Def.Types_s.quad32 -> i: Prims.int -> Vale.Def.Types_s.quad32 | {
"end_col": 54,
"end_line": 21,
"start_col": 2,
"start_line": 21
} |
Prims.Pure | val gctr_encrypt_block (icb_BE plain_LE: quad32) (alg: algorithm) (key: seq nat32) (i: int)
: Pure quad32 (requires is_aes_key_LE alg key) (ensures fun _ -> True) | [
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gctr_encrypt_block (icb_BE:quad32) (plain_LE:quad32) (alg:algorithm) (key:seq nat32) (i:int) : Pure quad32
(requires is_aes_key_LE alg key)
(ensures fun _ -> True)
=
let icb_LE = reverse_bytes_quad32 (inc32 icb_BE i) in
quad32_xor plain_LE (aes_encrypt_LE alg key icb_LE) | val gctr_encrypt_block (icb_BE plain_LE: quad32) (alg: algorithm) (key: seq nat32) (i: int)
: Pure quad32 (requires is_aes_key_LE alg key) (ensures fun _ -> True)
let gctr_encrypt_block (icb_BE plain_LE: quad32) (alg: algorithm) (key: seq nat32) (i: int)
: Pure quad32 (requires is_aes_key_LE alg key) (ensures fun _ -> True) = | false | null | false | let icb_LE = reverse_bytes_quad32 (inc32 icb_BE i) in
quad32_xor plain_LE (aes_encrypt_LE alg key icb_LE) | {
"checked_file": "Vale.AES.GCTR_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCTR_s.fst"
} | [] | [
"Vale.Def.Types_s.quad32",
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat32",
"Prims.int",
"Vale.Def.Types_s.quad32_xor",
"Vale.AES.AES_s.aes_encrypt_LE",
"Vale.Def.Types_s.reverse_bytes_quad32",
"Vale.AES.GCTR_s.inc32",
"Vale.AES.AES_s.is_aes_key_LE",
"Prims.l_True"
] | [] | module Vale.AES.GCTR_s
// IMPORTANT: Following NIST's specification, this spec is written assuming a big-endian mapping from bytes to quad32s
// Since the AES spec (AES_s) is in little-endian, we need to byteswap each time we call AES
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open FStar.Mul
open Vale.AES.AES_s
open FStar.Seq
// The max length of pow2_32 corresponds to the max length of buffers in Low*
// length plain < pow2_32 <= spec max of 2**39 - 256;
let is_gctr_plain_LE (p:seq nat8) : prop0 = length p < pow2_32
type gctr_plain_LE:eqtype = p:seq nat8 { is_gctr_plain_LE p }
type gctr_plain_internal_LE:eqtype = seq quad32
let inc32 (cb:quad32) (i:int) : quad32 =
Mkfour ((cb.lo0 + i) % pow2_32) cb.lo1 cb.hi2 cb.hi3
let gctr_encrypt_block (icb_BE:quad32) (plain_LE:quad32) (alg:algorithm) (key:seq nat32) (i:int) : Pure quad32
(requires is_aes_key_LE alg key) | false | false | Vale.AES.GCTR_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gctr_encrypt_block (icb_BE plain_LE: quad32) (alg: algorithm) (key: seq nat32) (i: int)
: Pure quad32 (requires is_aes_key_LE alg key) (ensures fun _ -> True) | [] | Vale.AES.GCTR_s.gctr_encrypt_block | {
"file_name": "vale/specs/crypto/Vale.AES.GCTR_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
icb_BE: Vale.Def.Types_s.quad32 ->
plain_LE: Vale.Def.Types_s.quad32 ->
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.Def.Types_s.nat32 ->
i: Prims.int
-> Prims.Pure Vale.Def.Types_s.quad32 | {
"end_col": 53,
"end_line": 28,
"start_col": 3,
"start_line": 26
} |
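The record above specifies `gctr_encrypt_block`: bump the big-endian counter block by `i` (mod 2^32 on the low word), byte-swap it to little-endian, encrypt it with AES, and XOR the result into the plaintext block. Below is a small, non-normative Python model of that dataflow; `block_cipher` is a stand-in for `aes_encrypt_LE`, quad32 values are modeled as 4-tuples of 32-bit words, and the `reverse_bytes_quad32` endianness step is deliberately abstracted away.

```python
# Illustrative model of inc32 / gctr_encrypt_block -- not the F* spec itself.
POW2_32 = 1 << 32

def inc32(cb, i):
    """Mkfour ((cb.lo0 + i) % pow2_32) cb.lo1 cb.hi2 cb.hi3"""
    lo0, lo1, hi2, hi3 = cb
    return ((lo0 + i) % POW2_32, lo1, hi2, hi3)

def quad32_xor(a, b):
    return tuple(x ^ y for x, y in zip(a, b))

def gctr_encrypt_block(icb_be, plain, block_cipher, i):
    # The real spec byte-reverses the incremented counter before encrypting;
    # that conversion is omitted in this sketch.
    keystream = block_cipher(inc32(icb_be, i))
    return quad32_xor(plain, keystream)
```

Because the keystream depends only on the counter and key, applying `gctr_encrypt_block` twice with the same `icb_be`, `block_cipher`, and `i` returns the original block: counter-mode encryption is its own inverse per block.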
Prims.Pure | val pad_to_128_bits (p: seq nat8)
: Pure (seq nat8)
(requires True)
(ensures fun q -> length q % 16 == 0 /\ length q <= length p + 15) | [
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let pad_to_128_bits (p:seq nat8) : Pure (seq nat8)
(requires True)
(ensures fun q -> length q % 16 == 0 /\ length q <= length p + 15)
=
let num_extra_bytes = length p % 16 in
if num_extra_bytes = 0 then p
else p @| (create (16 - num_extra_bytes) 0) | val pad_to_128_bits (p: seq nat8)
: Pure (seq nat8)
(requires True)
(ensures fun q -> length q % 16 == 0 /\ length q <= length p + 15)
let pad_to_128_bits (p: seq nat8)
: Pure (seq nat8)
(requires True)
(ensures fun q -> length q % 16 == 0 /\ length q <= length p + 15) = | false | null | false | let num_extra_bytes = length p % 16 in
if num_extra_bytes = 0 then p else p @| (create (16 - num_extra_bytes) 0) | {
"checked_file": "Vale.AES.GCTR_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCTR_s.fst"
} | [] | [
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat8",
"Prims.op_Equality",
"Prims.int",
"Prims.bool",
"FStar.Seq.Base.op_At_Bar",
"FStar.Seq.Base.create",
"Prims.op_Subtraction",
"Prims.op_Modulus",
"FStar.Seq.Base.length",
"Prims.l_True",
"Prims.l_and",
"Prims.eq2",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition"
] | [] | module Vale.AES.GCTR_s
// IMPORTANT: Following NIST's specification, this spec is written assuming a big-endian mapping from bytes to quad32s
// Since the AES spec (AES_s) is in little-endian, we need to byteswap each time we call AES
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open FStar.Mul
open Vale.AES.AES_s
open FStar.Seq
// The max length of pow2_32 corresponds to the max length of buffers in Low*
// length plain < pow2_32 <= spec max of 2**39 - 256;
let is_gctr_plain_LE (p:seq nat8) : prop0 = length p < pow2_32
type gctr_plain_LE:eqtype = p:seq nat8 { is_gctr_plain_LE p }
type gctr_plain_internal_LE:eqtype = seq quad32
let inc32 (cb:quad32) (i:int) : quad32 =
Mkfour ((cb.lo0 + i) % pow2_32) cb.lo1 cb.hi2 cb.hi3
let gctr_encrypt_block (icb_BE:quad32) (plain_LE:quad32) (alg:algorithm) (key:seq nat32) (i:int) : Pure quad32
(requires is_aes_key_LE alg key)
(ensures fun _ -> True)
=
let icb_LE = reverse_bytes_quad32 (inc32 icb_BE i) in
quad32_xor plain_LE (aes_encrypt_LE alg key icb_LE)
let rec gctr_encrypt_recursive (icb_BE:quad32) (plain:gctr_plain_internal_LE)
(alg:algorithm) (key:aes_key_LE alg) (i:int) : Tot (seq quad32) (decreases %[length plain]) =
if length plain = 0 then empty
else
cons (gctr_encrypt_block icb_BE (head plain) alg key i) (gctr_encrypt_recursive icb_BE (tail plain) alg key (i + 1))
let pad_to_128_bits (p:seq nat8) : Pure (seq nat8)
(requires True) | false | false | Vale.AES.GCTR_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val pad_to_128_bits (p: seq nat8)
: Pure (seq nat8)
(requires True)
(ensures fun q -> length q % 16 == 0 /\ length q <= length p + 15) | [] | Vale.AES.GCTR_s.pad_to_128_bits | {
"file_name": "vale/specs/crypto/Vale.AES.GCTR_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 -> Prims.Pure (FStar.Seq.Base.seq Vale.Def.Types_s.nat8) | {
"end_col": 45,
"end_line": 43,
"start_col": 3,
"start_line": 40
} |
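`pad_to_128_bits` zero-pads a byte string up to the next 16-byte boundary and leaves it unchanged when it is already block-aligned, which is exactly what the postcondition `length q % 16 == 0 /\ length q <= length p + 15` records. An equivalent Python rendering, for illustration only:

```python
def pad_to_128_bits(p: bytes) -> bytes:
    """Zero-pad to a multiple of 16 bytes; no-op when already aligned."""
    extra = len(p) % 16
    return p if extra == 0 else p + bytes(16 - extra)

assert len(pad_to_128_bits(b"abc")) == 16
assert pad_to_128_bits(bytes(32)) == bytes(32)   # already aligned: unchanged
```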
Prims.Tot | val gctr_encrypt_recursive
(icb_BE: quad32)
(plain: gctr_plain_internal_LE)
(alg: algorithm)
(key: aes_key_LE alg)
(i: int)
: Tot (seq quad32) (decreases %[length plain]) | [
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec gctr_encrypt_recursive (icb_BE:quad32) (plain:gctr_plain_internal_LE)
(alg:algorithm) (key:aes_key_LE alg) (i:int) : Tot (seq quad32) (decreases %[length plain]) =
if length plain = 0 then empty
else
cons (gctr_encrypt_block icb_BE (head plain) alg key i) (gctr_encrypt_recursive icb_BE (tail plain) alg key (i + 1)) | val gctr_encrypt_recursive
(icb_BE: quad32)
(plain: gctr_plain_internal_LE)
(alg: algorithm)
(key: aes_key_LE alg)
(i: int)
: Tot (seq quad32) (decreases %[length plain])
let rec gctr_encrypt_recursive
(icb_BE: quad32)
(plain: gctr_plain_internal_LE)
(alg: algorithm)
(key: aes_key_LE alg)
(i: int)
: Tot (seq quad32) (decreases %[length plain]) = | false | null | false | if length plain = 0
then empty
else
cons (gctr_encrypt_block icb_BE (head plain) alg key i)
(gctr_encrypt_recursive icb_BE (tail plain) alg key (i + 1)) | {
"checked_file": "Vale.AES.GCTR_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCTR_s.fst"
} | [
"total",
""
] | [
"Vale.Def.Types_s.quad32",
"Vale.AES.GCTR_s.gctr_plain_internal_LE",
"Vale.AES.AES_common_s.algorithm",
"Vale.AES.AES_s.aes_key_LE",
"Prims.int",
"Prims.op_Equality",
"FStar.Seq.Base.length",
"FStar.Seq.Base.empty",
"Prims.bool",
"FStar.Seq.Properties.cons",
"Vale.AES.GCTR_s.gctr_encrypt_block",
"FStar.Seq.Properties.head",
"Vale.AES.GCTR_s.gctr_encrypt_recursive",
"FStar.Seq.Properties.tail",
"Prims.op_Addition",
"FStar.Seq.Base.seq"
] | [] | module Vale.AES.GCTR_s
// IMPORTANT: Following NIST's specification, this spec is written assuming a big-endian mapping from bytes to quad32s
// Since the AES spec (AES_s) is in little-endian, we need to byteswap each time we call AES
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open FStar.Mul
open Vale.AES.AES_s
open FStar.Seq
// The max length of pow2_32 corresponds to the max length of buffers in Low*
// length plain < pow2_32 <= spec max of 2**39 - 256;
let is_gctr_plain_LE (p:seq nat8) : prop0 = length p < pow2_32
type gctr_plain_LE:eqtype = p:seq nat8 { is_gctr_plain_LE p }
type gctr_plain_internal_LE:eqtype = seq quad32
let inc32 (cb:quad32) (i:int) : quad32 =
Mkfour ((cb.lo0 + i) % pow2_32) cb.lo1 cb.hi2 cb.hi3
let gctr_encrypt_block (icb_BE:quad32) (plain_LE:quad32) (alg:algorithm) (key:seq nat32) (i:int) : Pure quad32
(requires is_aes_key_LE alg key)
(ensures fun _ -> True)
=
let icb_LE = reverse_bytes_quad32 (inc32 icb_BE i) in
quad32_xor plain_LE (aes_encrypt_LE alg key icb_LE)
let rec gctr_encrypt_recursive (icb_BE:quad32) (plain:gctr_plain_internal_LE) | false | false | Vale.AES.GCTR_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gctr_encrypt_recursive
(icb_BE: quad32)
(plain: gctr_plain_internal_LE)
(alg: algorithm)
(key: aes_key_LE alg)
(i: int)
: Tot (seq quad32) (decreases %[length plain]) | [
"recursion"
] | Vale.AES.GCTR_s.gctr_encrypt_recursive | {
"file_name": "vale/specs/crypto/Vale.AES.GCTR_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
icb_BE: Vale.Def.Types_s.quad32 ->
plain: Vale.AES.GCTR_s.gctr_plain_internal_LE ->
alg: Vale.AES.AES_common_s.algorithm ->
key: Vale.AES.AES_s.aes_key_LE alg ->
i: Prims.int
-> Prims.Tot (FStar.Seq.Base.seq Vale.Def.Types_s.quad32) | {
"end_col": 120,
"end_line": 35,
"start_col": 2,
"start_line": 33
} |
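`gctr_encrypt_recursive` applies the single-block encryption to each quad32 in turn, advancing the counter offset by one per block: it encrypts the head with offset `i` and recurses on the tail with `i + 1`, terminating on the empty sequence (the `decreases %[length plain]` measure). A direct, hedged Python transcription of that recursion, reusing the `gctr_encrypt_block` sketch above:

```python
def gctr_encrypt_recursive(icb_be, blocks, block_cipher, i=0):
    """Encrypt a list of quad32 blocks; the counter offset grows by 1 per block."""
    if not blocks:
        return []
    head, tail = blocks[0], blocks[1:]
    return [gctr_encrypt_block(icb_be, head, block_cipher, i)] + \
           gctr_encrypt_recursive(icb_be, tail, block_cipher, i + 1)
```

An iterative loop computes the same result; the recursive form simply mirrors the structure of the F* definition.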
Prims.Pure | val gctr_encrypt_LE_def (icb_BE: quad32) (plain: seq nat8) (alg: algorithm) (key: seq nat32)
: Pure (seq nat8)
(requires is_gctr_plain_LE plain /\ is_aes_key_LE alg key)
(ensures fun _ -> True) | [
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gctr_encrypt_LE_def (icb_BE:quad32) (plain:seq nat8) (alg:algorithm) (key:seq nat32) : Pure (seq nat8)
(requires is_gctr_plain_LE plain /\ is_aes_key_LE alg key)
(ensures fun _ -> True)
=
let num_extra = (length plain) % 16 in
if num_extra = 0 then
let plain_quads_LE = le_bytes_to_seq_quad32 plain in
let cipher_quads_LE = gctr_encrypt_recursive icb_BE plain_quads_LE alg key 0 in
le_seq_quad32_to_bytes cipher_quads_LE
else
let full_bytes_len = (length plain) - num_extra in
let full_blocks, final_block = split plain full_bytes_len in
let full_quads_LE = le_bytes_to_seq_quad32 full_blocks in
let final_quad_LE = le_bytes_to_quad32 (pad_to_128_bits final_block) in
let cipher_quads_LE = gctr_encrypt_recursive icb_BE full_quads_LE alg key 0 in
let final_cipher_quad_LE = gctr_encrypt_block icb_BE final_quad_LE alg key (full_bytes_len / 16) in
let cipher_bytes_full_LE = le_seq_quad32_to_bytes cipher_quads_LE in
let final_cipher_bytes_LE = slice (le_quad32_to_bytes final_cipher_quad_LE) 0 num_extra in
cipher_bytes_full_LE @| final_cipher_bytes_LE | val gctr_encrypt_LE_def (icb_BE: quad32) (plain: seq nat8) (alg: algorithm) (key: seq nat32)
: Pure (seq nat8)
(requires is_gctr_plain_LE plain /\ is_aes_key_LE alg key)
(ensures fun _ -> True)
let gctr_encrypt_LE_def (icb_BE: quad32) (plain: seq nat8) (alg: algorithm) (key: seq nat32)
: Pure (seq nat8)
(requires is_gctr_plain_LE plain /\ is_aes_key_LE alg key)
(ensures fun _ -> True) = | false | null | false | let num_extra = (length plain) % 16 in
if num_extra = 0
then
let plain_quads_LE = le_bytes_to_seq_quad32 plain in
let cipher_quads_LE = gctr_encrypt_recursive icb_BE plain_quads_LE alg key 0 in
le_seq_quad32_to_bytes cipher_quads_LE
else
let full_bytes_len = (length plain) - num_extra in
let full_blocks, final_block = split plain full_bytes_len in
let full_quads_LE = le_bytes_to_seq_quad32 full_blocks in
let final_quad_LE = le_bytes_to_quad32 (pad_to_128_bits final_block) in
let cipher_quads_LE = gctr_encrypt_recursive icb_BE full_quads_LE alg key 0 in
let final_cipher_quad_LE =
gctr_encrypt_block icb_BE final_quad_LE alg key (full_bytes_len / 16)
in
let cipher_bytes_full_LE = le_seq_quad32_to_bytes cipher_quads_LE in
let final_cipher_bytes_LE = slice (le_quad32_to_bytes final_cipher_quad_LE) 0 num_extra in
cipher_bytes_full_LE @| final_cipher_bytes_LE | {
"checked_file": "Vale.AES.GCTR_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCTR_s.fst"
} | [] | [
"Vale.Def.Types_s.quad32",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat8",
"Vale.AES.AES_common_s.algorithm",
"Vale.Def.Types_s.nat32",
"Prims.op_Equality",
"Prims.int",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"Vale.AES.GCTR_s.gctr_encrypt_recursive",
"Vale.Def.Types_s.le_bytes_to_seq_quad32",
"Prims.bool",
"FStar.Seq.Base.op_At_Bar",
"Vale.Def.Words_s.nat8",
"FStar.Seq.Base.slice",
"Vale.Def.Types_s.le_quad32_to_bytes",
"Vale.AES.GCTR_s.gctr_encrypt_block",
"Prims.op_Division",
"Vale.Def.Types_s.le_bytes_to_quad32",
"Vale.AES.GCTR_s.pad_to_128_bits",
"FStar.Pervasives.Native.tuple2",
"FStar.Seq.Properties.split",
"Prims.op_Subtraction",
"FStar.Seq.Base.length",
"Prims.op_Modulus",
"Prims.l_and",
"Vale.AES.GCTR_s.is_gctr_plain_LE",
"Vale.AES.AES_s.is_aes_key_LE",
"Prims.l_True"
] | [] | module Vale.AES.GCTR_s
// IMPORTANT: Following NIST's specification, this spec is written assuming a big-endian mapping from bytes to quad32s
// Since the AES spec (AES_s) is in little-endian, we need to byteswap each time we call AES
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Types_s
open FStar.Mul
open Vale.AES.AES_s
open FStar.Seq
// The max length of pow2_32 corresponds to the max length of buffers in Low*
// length plain < pow2_32 <= spec max of 2**39 - 256;
let is_gctr_plain_LE (p:seq nat8) : prop0 = length p < pow2_32
type gctr_plain_LE:eqtype = p:seq nat8 { is_gctr_plain_LE p }
type gctr_plain_internal_LE:eqtype = seq quad32
let inc32 (cb:quad32) (i:int) : quad32 =
Mkfour ((cb.lo0 + i) % pow2_32) cb.lo1 cb.hi2 cb.hi3
let gctr_encrypt_block (icb_BE:quad32) (plain_LE:quad32) (alg:algorithm) (key:seq nat32) (i:int) : Pure quad32
(requires is_aes_key_LE alg key)
(ensures fun _ -> True)
=
let icb_LE = reverse_bytes_quad32 (inc32 icb_BE i) in
quad32_xor plain_LE (aes_encrypt_LE alg key icb_LE)
let rec gctr_encrypt_recursive (icb_BE:quad32) (plain:gctr_plain_internal_LE)
(alg:algorithm) (key:aes_key_LE alg) (i:int) : Tot (seq quad32) (decreases %[length plain]) =
if length plain = 0 then empty
else
cons (gctr_encrypt_block icb_BE (head plain) alg key i) (gctr_encrypt_recursive icb_BE (tail plain) alg key (i + 1))
let pad_to_128_bits (p:seq nat8) : Pure (seq nat8)
(requires True)
(ensures fun q -> length q % 16 == 0 /\ length q <= length p + 15)
=
let num_extra_bytes = length p % 16 in
if num_extra_bytes = 0 then p
else p @| (create (16 - num_extra_bytes) 0)
// little-endian, except for icb_BE
let gctr_encrypt_LE_def (icb_BE:quad32) (plain:seq nat8) (alg:algorithm) (key:seq nat32) : Pure (seq nat8)
(requires is_gctr_plain_LE plain /\ is_aes_key_LE alg key) | false | false | Vale.AES.GCTR_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gctr_encrypt_LE_def (icb_BE: quad32) (plain: seq nat8) (alg: algorithm) (key: seq nat32)
: Pure (seq nat8)
(requires is_gctr_plain_LE plain /\ is_aes_key_LE alg key)
(ensures fun _ -> True) | [] | Vale.AES.GCTR_s.gctr_encrypt_LE_def | {
"file_name": "vale/specs/crypto/Vale.AES.GCTR_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
icb_BE: Vale.Def.Types_s.quad32 ->
plain: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.Def.Types_s.nat32
-> Prims.Pure (FStar.Seq.Base.seq Vale.Def.Types_s.nat8) | {
"end_col": 49,
"end_line": 69,
"start_col": 3,
"start_line": 49
} |
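The top-level `gctr_encrypt_LE_def` splits the message at the last full-block boundary, runs the block recursion over the full blocks, and handles a trailing partial block by padding it to 16 bytes, encrypting it with the next counter value, and keeping only the first `num_extra` bytes of the result. The byte/quad32 conversions (`le_bytes_to_seq_quad32`, `le_seq_quad32_to_bytes`, `le_quad32_to_bytes`) are abstracted in the Python sketch below, which works directly on bytes with `keystream_block(icb, i)` standing in for AES applied to the incremented counter.

```python
def gctr_encrypt(icb, plain: bytes, keystream_block) -> bytes:
    """keystream_block(icb, i) -> 16 bytes; stands in for AES(inc32(icb, i))."""
    out = bytearray()
    full_len = len(plain) - (len(plain) % 16)
    for i in range(full_len // 16):
        ks = keystream_block(icb, i)
        out += bytes(p ^ k for p, k in zip(plain[16 * i:16 * (i + 1)], ks))
    extra = len(plain) - full_len
    if extra:
        ks = keystream_block(icb, full_len // 16)
        out += bytes(p ^ k for p, k in zip(plain[full_len:], ks[:extra]))
    return bytes(out)
```

Zero-padding the final block, XORing it with the keystream, and then truncating to `num_extra` bytes (as in the spec) yields the same bytes as XORing just the trailing bytes with the first `num_extra` keystream bytes, which is what the loop above does.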
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let is_cmp (#a:eqtype) (is_le:a -> a -> bool) =
(forall (x y:a).{:pattern is_le x y} is_le x y \/ is_le y x) /\
(forall (x y:a).{:pattern is_le x y} is_le x y /\ is_le y x ==> x == y) /\
(forall (x y z:a).{:pattern is_le x y; is_le y z} is_le x y /\ is_le y z ==> is_le x z) | let is_cmp (#a: eqtype) (is_le: (a -> a -> bool)) = | false | null | false | (forall (x: a) (y: a). {:pattern is_le x y} is_le x y \/ is_le y x) /\
(forall (x: a) (y: a). {:pattern is_le x y} is_le x y /\ is_le y x ==> x == y) /\
(forall (x: a) (y: a) (z: a). {:pattern is_le x y; is_le y z} is_le x y /\ is_le y z ==> is_le x z) | {
"checked_file": "Vale.Lib.MapTree.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Lib.MapTree.fsti"
} | [
"total"
] | [
"Prims.eqtype",
"Prims.bool",
"Prims.l_and",
"Prims.l_Forall",
"Prims.l_or",
"Prims.b2t",
"Prims.l_imp",
"Prims.eq2",
"Prims.logical"
] | [] | module Vale.Lib.MapTree
open FStar.Mul
(** Balanced binary search tree *) | false | false | Vale.Lib.MapTree.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val is_cmp : is_le: (_: a -> _: a -> Prims.bool) -> Prims.logical | [] | Vale.Lib.MapTree.is_cmp | {
"file_name": "vale/code/lib/collections/Vale.Lib.MapTree.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | is_le: (_: a -> _: a -> Prims.bool) -> Prims.logical | {
"end_col": 89,
"end_line": 9,
"start_col": 2,
"start_line": 7
} |
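`is_cmp` packages the usual total-order laws for a boolean "less-or-equal" test: totality, antisymmetry, and transitivity. For a decidable order on a small finite type these laws can be checked by brute force, as the hypothetical Python check below illustrates (the function name and setup are mine, not part of the record).

```python
from itertools import product

def satisfies_is_cmp(domain, is_le) -> bool:
    """Totality, antisymmetry and transitivity of a boolean is_le on a finite domain."""
    domain = list(domain)
    total = all(is_le(x, y) or is_le(y, x) for x, y in product(domain, repeat=2))
    antisym = all((not (is_le(x, y) and is_le(y, x))) or x == y
                  for x, y in product(domain, repeat=2))
    trans = all((not (is_le(x, y) and is_le(y, z))) or is_le(x, z)
                for x, y, z in product(domain, repeat=3))
    return total and antisym and trans

assert satisfies_is_cmp(range(5), lambda x, y: x <= y)
assert not satisfies_is_cmp(range(5), lambda x, y: x < y)   # strict order: totality fails at x == y
```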
Prims.Tot | val parse_seq_flbytes (sz: nat)
: Tot (parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz)) | [
{
"abbrev": false,
"full_module": "LowParse.Spec.FLData",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.SeqBytes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.SeqBytes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let parse_seq_flbytes
(sz: nat)
: Tot (parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz))
= tot_parse_seq_flbytes sz | val parse_seq_flbytes (sz: nat)
: Tot (parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz))
let parse_seq_flbytes (sz: nat)
: Tot (parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz)) = | false | null | false | tot_parse_seq_flbytes sz | {
"checked_file": "LowParse.Spec.SeqBytes.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.FLData.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.SeqBytes.Base.fst"
} | [
"total"
] | [
"Prims.nat",
"LowParse.Spec.SeqBytes.Base.tot_parse_seq_flbytes",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.total_constant_size_parser_kind",
"FStar.Seq.Properties.lseq",
"LowParse.Bytes.byte"
] | [] | module LowParse.Spec.SeqBytes.Base
include LowParse.Spec.FLData
let parse_seq_flbytes_gen
(sz: nat)
(s: bytes { Seq.length s == sz } )
: Tot (Seq.lseq byte sz)
= s
let tot_parse_seq_flbytes
(sz: nat)
: Tot (tot_parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz))
= tot_make_total_constant_size_parser sz (Seq.lseq byte sz) (parse_seq_flbytes_gen sz)
let parse_seq_flbytes
(sz: nat) | false | false | LowParse.Spec.SeqBytes.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val parse_seq_flbytes (sz: nat)
: Tot (parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz)) | [] | LowParse.Spec.SeqBytes.Base.parse_seq_flbytes | {
"file_name": "src/lowparse/LowParse.Spec.SeqBytes.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | sz: Prims.nat
-> LowParse.Spec.Base.parser (LowParse.Spec.Base.total_constant_size_parser_kind sz)
(FStar.Seq.Properties.lseq LowParse.Bytes.byte sz) | {
"end_col": 26,
"end_line": 18,
"start_col": 2,
"start_line": 18
} |
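`parse_seq_flbytes sz` is the constant-size parser: it succeeds exactly when at least `sz` bytes of input are available, returns those `sz` bytes unchanged, and reports `sz` bytes consumed. A hypothetical Python rendering of that behaviour, for orientation only:

```python
from typing import Optional, Tuple

def parse_flbytes(sz: int, data: bytes) -> Optional[Tuple[bytes, int]]:
    """Return (value, consumed) on success, None if fewer than sz bytes remain."""
    if len(data) < sz:
        return None
    return data[:sz], sz

assert parse_flbytes(4, b"abcdef") == (b"abcd", 4)
assert parse_flbytes(4, b"abc") is None
```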
Prims.Tot | val serialize_seq_flbytes' (sz: nat) : Tot (bare_serializer (Seq.lseq byte sz)) | [
{
"abbrev": false,
"full_module": "LowParse.Spec.FLData",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.SeqBytes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.SeqBytes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let serialize_seq_flbytes'
(sz: nat)
: Tot (bare_serializer (Seq.lseq byte sz))
= fun (x: Seq.lseq byte sz) -> (
x
) | val serialize_seq_flbytes' (sz: nat) : Tot (bare_serializer (Seq.lseq byte sz))
let serialize_seq_flbytes' (sz: nat) : Tot (bare_serializer (Seq.lseq byte sz)) = | false | null | false | fun (x: Seq.lseq byte sz) -> (x) | {
"checked_file": "LowParse.Spec.SeqBytes.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.FLData.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.SeqBytes.Base.fst"
} | [
"total"
] | [
"Prims.nat",
"FStar.Seq.Properties.lseq",
"LowParse.Bytes.byte",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.bare_serializer"
] | [] | module LowParse.Spec.SeqBytes.Base
include LowParse.Spec.FLData
let parse_seq_flbytes_gen
(sz: nat)
(s: bytes { Seq.length s == sz } )
: Tot (Seq.lseq byte sz)
= s
let tot_parse_seq_flbytes
(sz: nat)
: Tot (tot_parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz))
= tot_make_total_constant_size_parser sz (Seq.lseq byte sz) (parse_seq_flbytes_gen sz)
let parse_seq_flbytes
(sz: nat)
: Tot (parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz))
= tot_parse_seq_flbytes sz
let serialize_seq_flbytes'
(sz: nat) | false | false | LowParse.Spec.SeqBytes.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val serialize_seq_flbytes' (sz: nat) : Tot (bare_serializer (Seq.lseq byte sz)) | [] | LowParse.Spec.SeqBytes.Base.serialize_seq_flbytes' | {
"file_name": "src/lowparse/LowParse.Spec.SeqBytes.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | sz: Prims.nat
-> LowParse.Spec.Base.bare_serializer (FStar.Seq.Properties.lseq LowParse.Bytes.byte sz) | {
"end_col": 3,
"end_line": 25,
"start_col": 2,
"start_line": 23
} |
Prims.Tot | val parse_seq_flbytes_gen (sz: nat) (s: bytes{Seq.length s == sz}) : Tot (Seq.lseq byte sz) | [
{
"abbrev": false,
"full_module": "LowParse.Spec.FLData",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.SeqBytes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.SeqBytes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let parse_seq_flbytes_gen
(sz: nat)
(s: bytes { Seq.length s == sz } )
: Tot (Seq.lseq byte sz)
= s | val parse_seq_flbytes_gen (sz: nat) (s: bytes{Seq.length s == sz}) : Tot (Seq.lseq byte sz)
let parse_seq_flbytes_gen (sz: nat) (s: bytes{Seq.length s == sz}) : Tot (Seq.lseq byte sz) = | false | null | false | s | {
"checked_file": "LowParse.Spec.SeqBytes.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.FLData.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.SeqBytes.Base.fst"
} | [
"total"
] | [
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Seq.Properties.lseq"
] | [] | module LowParse.Spec.SeqBytes.Base
include LowParse.Spec.FLData
let parse_seq_flbytes_gen
(sz: nat)
(s: bytes { Seq.length s == sz } ) | false | false | LowParse.Spec.SeqBytes.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val parse_seq_flbytes_gen (sz: nat) (s: bytes{Seq.length s == sz}) : Tot (Seq.lseq byte sz) | [] | LowParse.Spec.SeqBytes.Base.parse_seq_flbytes_gen | {
"file_name": "src/lowparse/LowParse.Spec.SeqBytes.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | sz: Prims.nat -> s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz}
-> FStar.Seq.Properties.lseq LowParse.Bytes.byte sz | {
"end_col": 3,
"end_line": 8,
"start_col": 2,
"start_line": 8
} |
Prims.Tot | val serialize_seq_flbytes (sz: nat) : Tot (serializer (parse_seq_flbytes sz)) | [
{
"abbrev": false,
"full_module": "LowParse.Spec.FLData",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.SeqBytes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.SeqBytes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let serialize_seq_flbytes
(sz: nat)
: Tot (serializer (parse_seq_flbytes sz))
= serialize_seq_flbytes_correct sz;
serialize_seq_flbytes' sz | val serialize_seq_flbytes (sz: nat) : Tot (serializer (parse_seq_flbytes sz))
let serialize_seq_flbytes (sz: nat) : Tot (serializer (parse_seq_flbytes sz)) = | false | null | false | serialize_seq_flbytes_correct sz;
serialize_seq_flbytes' sz | {
"checked_file": "LowParse.Spec.SeqBytes.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.FLData.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.SeqBytes.Base.fst"
} | [
"total"
] | [
"Prims.nat",
"LowParse.Spec.SeqBytes.Base.serialize_seq_flbytes'",
"Prims.unit",
"LowParse.Spec.SeqBytes.Base.serialize_seq_flbytes_correct",
"LowParse.Spec.Base.serializer",
"LowParse.Spec.Base.total_constant_size_parser_kind",
"FStar.Seq.Properties.lseq",
"LowParse.Bytes.byte",
"LowParse.Spec.SeqBytes.Base.parse_seq_flbytes"
] | [] | module LowParse.Spec.SeqBytes.Base
include LowParse.Spec.FLData
let parse_seq_flbytes_gen
(sz: nat)
(s: bytes { Seq.length s == sz } )
: Tot (Seq.lseq byte sz)
= s
let tot_parse_seq_flbytes
(sz: nat)
: Tot (tot_parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz))
= tot_make_total_constant_size_parser sz (Seq.lseq byte sz) (parse_seq_flbytes_gen sz)
let parse_seq_flbytes
(sz: nat)
: Tot (parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz))
= tot_parse_seq_flbytes sz
let serialize_seq_flbytes'
(sz: nat)
: Tot (bare_serializer (Seq.lseq byte sz))
= fun (x: Seq.lseq byte sz) -> (
x
)
let serialize_seq_flbytes_correct
(sz: nat)
: Lemma
(serializer_correct (parse_seq_flbytes sz) (serialize_seq_flbytes' sz))
= let prf
(input: Seq.lseq byte sz)
: Lemma
(
let ser = serialize_seq_flbytes' sz input in
Seq.length ser == sz /\
parse (parse_seq_flbytes sz) ser == Some (input, sz)
)
= ()
in
Classical.forall_intro prf
let serialize_seq_flbytes
(sz: nat) | false | false | LowParse.Spec.SeqBytes.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val serialize_seq_flbytes (sz: nat) : Tot (serializer (parse_seq_flbytes sz)) | [] | LowParse.Spec.SeqBytes.Base.serialize_seq_flbytes | {
"file_name": "src/lowparse/LowParse.Spec.SeqBytes.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | sz: Prims.nat -> LowParse.Spec.Base.serializer (LowParse.Spec.SeqBytes.Base.parse_seq_flbytes sz) | {
"end_col": 27,
"end_line": 47,
"start_col": 2,
"start_line": 46
} |
Prims.Tot | val tot_parse_seq_flbytes (sz: nat)
: Tot (tot_parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz)) | [
{
"abbrev": false,
"full_module": "LowParse.Spec.FLData",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.SeqBytes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.SeqBytes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tot_parse_seq_flbytes
(sz: nat)
: Tot (tot_parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz))
= tot_make_total_constant_size_parser sz (Seq.lseq byte sz) (parse_seq_flbytes_gen sz) | val tot_parse_seq_flbytes (sz: nat)
: Tot (tot_parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz))
let tot_parse_seq_flbytes (sz: nat)
: Tot (tot_parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz)) = | false | null | false | tot_make_total_constant_size_parser sz (Seq.lseq byte sz) (parse_seq_flbytes_gen sz) | {
"checked_file": "LowParse.Spec.SeqBytes.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.FLData.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.SeqBytes.Base.fst"
} | [
"total"
] | [
"Prims.nat",
"LowParse.Spec.Combinators.tot_make_total_constant_size_parser",
"FStar.Seq.Properties.lseq",
"LowParse.Bytes.byte",
"LowParse.Spec.SeqBytes.Base.parse_seq_flbytes_gen",
"LowParse.Spec.Base.tot_parser",
"LowParse.Spec.Base.total_constant_size_parser_kind"
] | [] | module LowParse.Spec.SeqBytes.Base
include LowParse.Spec.FLData
let parse_seq_flbytes_gen
(sz: nat)
(s: bytes { Seq.length s == sz } )
: Tot (Seq.lseq byte sz)
= s
let tot_parse_seq_flbytes
(sz: nat) | false | false | LowParse.Spec.SeqBytes.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tot_parse_seq_flbytes (sz: nat)
: Tot (tot_parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz)) | [] | LowParse.Spec.SeqBytes.Base.tot_parse_seq_flbytes | {
"file_name": "src/lowparse/LowParse.Spec.SeqBytes.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | sz: Prims.nat
-> LowParse.Spec.Base.tot_parser (LowParse.Spec.Base.total_constant_size_parser_kind sz)
(FStar.Seq.Properties.lseq LowParse.Bytes.byte sz) | {
"end_col": 86,
"end_line": 13,
"start_col": 2,
"start_line": 13
} |
FStar.Pervasives.Lemma | val serialize_seq_flbytes_correct (sz: nat)
: Lemma (serializer_correct (parse_seq_flbytes sz) (serialize_seq_flbytes' sz)) | [
{
"abbrev": false,
"full_module": "LowParse.Spec.FLData",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.SeqBytes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.SeqBytes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let serialize_seq_flbytes_correct
(sz: nat)
: Lemma
(serializer_correct (parse_seq_flbytes sz) (serialize_seq_flbytes' sz))
= let prf
(input: Seq.lseq byte sz)
: Lemma
(
let ser = serialize_seq_flbytes' sz input in
Seq.length ser == sz /\
parse (parse_seq_flbytes sz) ser == Some (input, sz)
)
= ()
in
Classical.forall_intro prf | val serialize_seq_flbytes_correct (sz: nat)
: Lemma (serializer_correct (parse_seq_flbytes sz) (serialize_seq_flbytes' sz))
let serialize_seq_flbytes_correct (sz: nat)
: Lemma (serializer_correct (parse_seq_flbytes sz) (serialize_seq_flbytes' sz)) = | false | null | true | let prf (input: Seq.lseq byte sz)
: Lemma
(let ser = serialize_seq_flbytes' sz input in
Seq.length ser == sz /\ parse (parse_seq_flbytes sz) ser == Some (input, sz)) =
()
in
Classical.forall_intro prf | {
"checked_file": "LowParse.Spec.SeqBytes.Base.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.FLData.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.SeqBytes.Base.fst"
} | [
"lemma"
] | [
"Prims.nat",
"FStar.Classical.forall_intro",
"FStar.Seq.Properties.lseq",
"LowParse.Bytes.byte",
"Prims.l_and",
"Prims.eq2",
"FStar.Seq.Base.length",
"LowParse.Spec.SeqBytes.Base.serialize_seq_flbytes'",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowParse.Spec.SeqBytes.Base.parse_seq_flbytes",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.serializer_correct",
"LowParse.Spec.Base.total_constant_size_parser_kind"
] | [] | module LowParse.Spec.SeqBytes.Base
include LowParse.Spec.FLData
let parse_seq_flbytes_gen
(sz: nat)
(s: bytes { Seq.length s == sz } )
: Tot (Seq.lseq byte sz)
= s
let tot_parse_seq_flbytes
(sz: nat)
: Tot (tot_parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz))
= tot_make_total_constant_size_parser sz (Seq.lseq byte sz) (parse_seq_flbytes_gen sz)
let parse_seq_flbytes
(sz: nat)
: Tot (parser (total_constant_size_parser_kind sz) (Seq.lseq byte sz))
= tot_parse_seq_flbytes sz
let serialize_seq_flbytes'
(sz: nat)
: Tot (bare_serializer (Seq.lseq byte sz))
= fun (x: Seq.lseq byte sz) -> (
x
)
let serialize_seq_flbytes_correct
(sz: nat)
: Lemma | false | false | LowParse.Spec.SeqBytes.Base.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val serialize_seq_flbytes_correct (sz: nat)
: Lemma (serializer_correct (parse_seq_flbytes sz) (serialize_seq_flbytes' sz)) | [] | LowParse.Spec.SeqBytes.Base.serialize_seq_flbytes_correct | {
"file_name": "src/lowparse/LowParse.Spec.SeqBytes.Base.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | sz: Prims.nat
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.serializer_correct (LowParse.Spec.SeqBytes.Base.parse_seq_flbytes sz)
(LowParse.Spec.SeqBytes.Base.serialize_seq_flbytes' sz)) | {
"end_col": 28,
"end_line": 41,
"start_col": 1,
"start_line": 31
} |
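Taken together, the fixed-length-bytes records say that the serializer is the identity on `sz`-byte strings and that `serialize_seq_flbytes_correct` is exactly the round-trip property `parse (parse_seq_flbytes sz) (serialize x) == Some (x, sz)`. Reusing the hypothetical `parse_flbytes` sketch above, the property can be spot-checked like this:

```python
import os

def serialize_flbytes(sz: int, x: bytes) -> bytes:
    assert len(x) == sz          # the serializer is the identity on lseq byte sz
    return x

for sz in (0, 1, 16, 64):
    x = os.urandom(sz)
    assert parse_flbytes(sz, serialize_flbytes(sz, x)) == (x, sz)
```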
Prims.Tot | val add_frame (#g:env) (#t:st_term) (#c:comp_st) (t_typing:st_typing g t c)
(#frame:vprop)
(frame_typing:tot_typing g frame tm_vprop)
: t':st_term &
c':comp_st { c' == add_frame c frame } &
st_typing g t' c' | [
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Syntax.Printer",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let add_frame (#g:env) (#t:st_term) (#c:comp_st) (t_typing:st_typing g t c)
(#frame:vprop)
(frame_typing:tot_typing g frame tm_vprop)
: t':st_term &
c':comp_st { c' == add_frame c frame } &
st_typing g t' c' =
(| t, add_frame c frame, T_Frame _ _ _ _ frame_typing t_typing |) | val add_frame (#g:env) (#t:st_term) (#c:comp_st) (t_typing:st_typing g t c)
(#frame:vprop)
(frame_typing:tot_typing g frame tm_vprop)
: t':st_term &
c':comp_st { c' == add_frame c frame } &
st_typing g t' c'
let add_frame
(#g: env)
(#t: st_term)
(#c: comp_st)
(t_typing: st_typing g t c)
(#frame: vprop)
(frame_typing: tot_typing g frame tm_vprop)
: t': st_term & c': comp_st{c' == add_frame c frame} & st_typing g t' c' = | false | null | false | (| t, add_frame c frame, T_Frame _ _ _ _ frame_typing t_typing |) | {
"checked_file": "Pulse.Typing.Combinators.fst.checked",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.Printer.fsti.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": true,
"source_file": "Pulse.Typing.Combinators.fst"
} | [
"total"
] | [
"Pulse.Typing.Env.env",
"Pulse.Syntax.Base.st_term",
"Pulse.Syntax.Base.comp_st",
"Pulse.Typing.st_typing",
"Pulse.Syntax.Base.vprop",
"Pulse.Typing.tot_typing",
"Pulse.Syntax.Base.tm_vprop",
"FStar.Pervasives.Mkdtuple3",
"Prims.eq2",
"Pulse.Typing.add_frame",
"Pulse.Typing.T_Frame",
"FStar.Pervasives.dtuple3"
] | [] | module Pulse.Typing.Combinators
module RT = FStar.Reflection.Typing
module R = FStar.Reflection.V2
module L = FStar.List.Tot
module T = FStar.Tactics.V2
module P = Pulse.Syntax.Printer
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing
open Pulse.Checker.Pure
let rec vprop_equiv_typing (#g:_) (#t0 #t1:term) (v:vprop_equiv g t0 t1)
: GTot ((tot_typing g t0 tm_vprop -> tot_typing g t1 tm_vprop) &
(tot_typing g t1 tm_vprop -> tot_typing g t0 tm_vprop))
(decreases v)
= match v with
| VE_Refl _ _ -> (fun x -> x), (fun x -> x)
| VE_Sym _ _ _ v' ->
let f, g = vprop_equiv_typing v' in
g, f
| VE_Trans g t0 t2 t1 v02 v21 ->
let f02, f20 = vprop_equiv_typing v02 in
let f21, f12 = vprop_equiv_typing v21 in
(fun x -> f21 (f02 x)),
(fun x -> f20 (f12 x))
| VE_Ctxt g s0 s1 s0' s1' v0 v1 ->
let f0, f0' = vprop_equiv_typing v0 in
let f1, f1' = vprop_equiv_typing v1 in
let ff (x:tot_typing g (tm_star s0 s1) tm_vprop)
: tot_typing g (tm_star s0' s1') tm_vprop
= let s0_typing = star_typing_inversion_l x in
let s1_typing = star_typing_inversion_r x in
let s0'_typing, s1'_typing = f0 s0_typing, f1 s1_typing in
star_typing s0'_typing s1'_typing
in
let gg (x:tot_typing g (tm_star s0' s1') tm_vprop)
: tot_typing g (tm_star s0 s1) tm_vprop
= let s0'_typing = star_typing_inversion_l x in
let s1'_typing = star_typing_inversion_r x in
star_typing (f0' s0'_typing) (f1' s1'_typing)
in
ff, gg
| VE_Unit g t ->
let fwd (x:tot_typing g (tm_star tm_emp t) tm_vprop)
: tot_typing g t tm_vprop
= let r = star_typing_inversion_r x in
r
in
let bk (x:tot_typing g t tm_vprop)
: tot_typing g (tm_star tm_emp t) tm_vprop
= star_typing emp_typing x
in
fwd, bk
| VE_Comm g t0 t1 ->
let f t0 t1 (x:tot_typing g (tm_star t0 t1) tm_vprop)
: tot_typing g (tm_star t1 t0) tm_vprop
= let tt0 = star_typing_inversion_l x in
let tt1 = star_typing_inversion_r x in
star_typing tt1 tt0
in
f t0 t1, f t1 t0
| VE_Assoc g t0 t1 t2 ->
let fwd (x:tot_typing g (tm_star t0 (tm_star t1 t2)) tm_vprop)
: tot_typing g (tm_star (tm_star t0 t1) t2) tm_vprop
= let tt0 = star_typing_inversion_l x in
let tt12 = star_typing_inversion_r x in
let tt1 = star_typing_inversion_l tt12 in
let tt2 = star_typing_inversion_r tt12 in
star_typing (star_typing tt0 tt1) tt2
in
let bk (x : tot_typing g (tm_star (tm_star t0 t1) t2) tm_vprop)
: tot_typing g (tm_star t0 (tm_star t1 t2)) tm_vprop
= let tt01 = star_typing_inversion_l x in
let tt2 = star_typing_inversion_r x in
let tt0 = star_typing_inversion_l tt01 in
let tt1 = star_typing_inversion_r tt01 in
star_typing tt0 (star_typing tt1 tt2)
in
fwd, bk
| VE_Ext g t0 t1 token ->
let d1, d2 = vprop_eq_typing_inversion g t0 t1 token in
(fun _ -> d2),
(fun _ -> d1)
#push-options "--z3rlimit_factor 8 --ifuel 1 --fuel 2 --query_stats"
let rec mk_bind (g:env)
(pre:term)
(e1:st_term)
(e2:st_term)
(c1:comp_st)
(c2:comp_st)
(px:nvar { ~ (Set.mem (snd px) (dom g)) })
(d_e1:st_typing g e1 c1)
(d_c1res:tot_typing g (comp_res c1) (tm_type (comp_u c1)))
(d_e2:st_typing (push_binding g (snd px) (fst px) (comp_res c1)) (open_st_term_nv e2 px) c2)
(res_typing:universe_of g (comp_res c2) (comp_u c2))
(post_typing:tot_typing (push_binding g (snd px) (fst px) (comp_res c2))
(open_term_nv (comp_post c2) px)
tm_vprop)
: T.TacH (t:st_term &
c:comp_st { st_comp_of_comp c == st_comp_with_pre (st_comp_of_comp c2) pre } &
st_typing g t c)
(requires fun _ ->
let _, x = px in
comp_pre c1 == pre /\
None? (lookup g x) /\
(~(x `Set.mem` freevars_st e2)) /\
open_term (comp_post c1) x == comp_pre c2 /\
(~ (x `Set.mem` freevars (comp_post c2))))
(ensures fun _ _ -> True) =
let _, x = px in
let b = nvar_as_binder px (comp_res c1) in
match c1, c2 with
| C_ST _, C_ST _ ->
let bc = Bind_comp g x c1 c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
| C_STGhost inames1 _, C_STGhost inames2 _ ->
if eq_tm inames1 inames2
then begin
let bc = Bind_comp g x c1 c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose two stghost computations with different opened invariants"
| C_STAtomic inames _, C_ST _ ->
if eq_tm inames tm_emp_inames
then begin
let c1lifted = C_ST (st_comp_of_comp c1) in
let d_e1 : st_typing g e1 c1lifted =
T_Lift _ _ _ c1lifted d_e1 (Lift_STAtomic_ST _ c1) in
let bc = Bind_comp g x c1lifted c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose atomic with non-emp opened invariants with stt"
| C_STGhost inames1 _, C_STAtomic inames2 _ ->
if eq_tm inames1 inames2
then begin
let w = get_non_informative_witness g (comp_u c1) (comp_res c1) in
let bc = Bind_comp_ghost_l g x c1 c2 w res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose ghost and atomic with different opened invariants"
| C_STAtomic inames1 _, C_STGhost inames2 _ ->
if eq_tm inames1 inames2
then begin
let w = get_non_informative_witness g (comp_u c2) (comp_res c2) in
let bc = Bind_comp_ghost_r g x c1 c2 w res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose atomic and ghost with different opened invariants"
| C_ST _, C_STAtomic inames _ ->
if eq_tm inames tm_emp_inames
then begin
let c2lifted = C_ST (st_comp_of_comp c2) in
let g' = push_binding g x (fst px) (comp_res c1) in
let d_e2 : st_typing g' (open_st_term_nv e2 px) c2lifted =
T_Lift _ _ _ c2lifted d_e2 (Lift_STAtomic_ST _ c2) in
let bc = Bind_comp g x c1 c2lifted res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose stt with atomic with non-emp opened invariants"
| C_STGhost inames _, C_ST _ ->
if eq_tm inames tm_emp_inames
then begin
let w = get_non_informative_witness g (comp_u c1) (comp_res c1) in
let c1lifted = C_STAtomic inames (st_comp_of_comp c1) in
let d_e1 : st_typing g e1 c1lifted =
T_Lift _ _ _ c1lifted d_e1 (Lift_STGhost_STAtomic g c1 w) in
mk_bind g pre e1 e2 c1lifted c2 px d_e1 d_c1res d_e2 res_typing post_typing
end
else fail g None "Cannot compose ghost with stt with non-emp opened invariants"
| C_ST _, C_STGhost inames _ ->
if eq_tm inames tm_emp_inames
then begin
let g' = push_binding g x (fst px) (comp_res c1) in
let w = get_non_informative_witness g' (comp_u c2) (comp_res c2) in
let c2lifted = C_STAtomic inames (st_comp_of_comp c2) in
let d_e2 : st_typing g' (open_st_term_nv e2 px) c2lifted =
T_Lift _ _ _ c2lifted d_e2 (Lift_STGhost_STAtomic g' c2 w) in
let (| t, c, d |) = mk_bind g pre e1 e2 c1 c2lifted px d_e1 d_c1res d_e2 res_typing post_typing in
(| t, c, d |)
end
else fail g None "Cannot compose stt with ghost with non-emp opened invariants"
| C_STAtomic inames _, C_STAtomic _ _ ->
if eq_tm inames tm_emp_inames
then begin
let c1lifted = C_ST (st_comp_of_comp c1) in
let d_e1 : st_typing g e1 c1lifted =
T_Lift _ _ _ c1lifted d_e1 (Lift_STAtomic_ST _ c1) in
mk_bind g pre e1 e2 c1lifted c2 px d_e1 d_c1res d_e2 res_typing post_typing
end
else fail g None "Cannot compose statomics with non-emp opened invariants"
| _, _ -> fail g None "bind either not implemented (e.g. ghost) or not possible"
#pop-options
let bind_res_and_post_typing (g:env) (s2:st_comp) (x:var { fresh_wrt x g (freevars s2.post) })
(post_hint:post_hint_opt g { comp_post_matches_hint (C_ST s2) post_hint })
: T.Tac (universe_of g s2.res s2.u &
tot_typing (push_binding g x ppname_default s2.res) (open_term_nv s2.post (v_as_nv x)) tm_vprop)
= match post_hint with
| None ->
(* We're inferring a post, so these checks are unavoidable *)
(* since we need to type the result in a smaller env g *)
let (| u, res_typing |) = check_universe g s2.res in
if not (eq_univ u s2.u)
then fail g None "Unexpected universe for result type"
else if x `Set.mem` freevars s2.post
then fail g None (Printf.sprintf "Bound variable %d escapes scope in postcondition %s" x (P.term_to_string s2.post))
else (
let y = x in //fresh g in
let s2_post_opened = open_term_nv s2.post (v_as_nv y) in
let post_typing =
check_vprop_with_core (push_binding g y ppname_default s2.res) s2_post_opened in
res_typing, post_typing
)
| Some post ->
if x `Set.mem` freevars s2.post
then fail g None "Unexpected mismatched postcondition in bind" //exclude with a stronger type on check'
else (
let pr = post_hint_typing g post x in
pr.ty_typing, pr.post_typing
)
let add_frame (#g:env) (#t:st_term) (#c:comp_st) (t_typing:st_typing g t c)
(#frame:vprop)
(frame_typing:tot_typing g frame tm_vprop)
: t':st_term &
c':comp_st { c' == add_frame c frame } &
st_typing g t' c' = | false | false | Pulse.Typing.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val add_frame (#g:env) (#t:st_term) (#c:comp_st) (t_typing:st_typing g t c)
(#frame:vprop)
(frame_typing:tot_typing g frame tm_vprop)
: t':st_term &
c':comp_st { c' == add_frame c frame } &
st_typing g t' c' | [] | Pulse.Typing.Combinators.add_frame | {
"file_name": "lib/steel/pulse/Pulse.Typing.Combinators.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
t_typing: Pulse.Typing.st_typing g t c ->
frame_typing: Pulse.Typing.tot_typing g frame Pulse.Syntax.Base.tm_vprop
-> FStar.Pervasives.dtuple3 Pulse.Syntax.Base.st_term
(fun _ -> c': Pulse.Syntax.Base.comp_st{c' == Pulse.Typing.add_frame c frame})
(fun t' c' -> Pulse.Typing.st_typing g t' c') | {
"end_col": 67,
"end_line": 241,
"start_col": 2,
"start_line": 241
} |
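`add_frame` is a direct use of the `T_Frame` rule: it leaves the term unchanged and only re-types it against a computation type extended with `frame`, i.e. the separation-logic frame rule. In conventional inference-rule notation (this rendering is mine, not text from the record):

```latex
\[
  \frac{\Gamma \vdash e : \mathsf{stt}\;a\;P\;(\lambda x.\,Q\,x)
        \qquad \Gamma \vdash F : \mathsf{vprop}}
       {\Gamma \vdash e : \mathsf{stt}\;a\;(P \ast F)\;(\lambda x.\,Q\,x \ast F)}
\]
```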
Prims.GTot | val vprop_equiv_typing (#g:_) (#t0 #t1:term) (v:vprop_equiv g t0 t1)
: GTot ((tot_typing g t0 tm_vprop -> tot_typing g t1 tm_vprop) &
(tot_typing g t1 tm_vprop -> tot_typing g t0 tm_vprop)) | [
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Syntax.Printer",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec vprop_equiv_typing (#g:_) (#t0 #t1:term) (v:vprop_equiv g t0 t1)
: GTot ((tot_typing g t0 tm_vprop -> tot_typing g t1 tm_vprop) &
(tot_typing g t1 tm_vprop -> tot_typing g t0 tm_vprop))
(decreases v)
= match v with
| VE_Refl _ _ -> (fun x -> x), (fun x -> x)
| VE_Sym _ _ _ v' ->
let f, g = vprop_equiv_typing v' in
g, f
| VE_Trans g t0 t2 t1 v02 v21 ->
let f02, f20 = vprop_equiv_typing v02 in
let f21, f12 = vprop_equiv_typing v21 in
(fun x -> f21 (f02 x)),
(fun x -> f20 (f12 x))
| VE_Ctxt g s0 s1 s0' s1' v0 v1 ->
let f0, f0' = vprop_equiv_typing v0 in
let f1, f1' = vprop_equiv_typing v1 in
let ff (x:tot_typing g (tm_star s0 s1) tm_vprop)
: tot_typing g (tm_star s0' s1') tm_vprop
= let s0_typing = star_typing_inversion_l x in
let s1_typing = star_typing_inversion_r x in
let s0'_typing, s1'_typing = f0 s0_typing, f1 s1_typing in
star_typing s0'_typing s1'_typing
in
let gg (x:tot_typing g (tm_star s0' s1') tm_vprop)
: tot_typing g (tm_star s0 s1) tm_vprop
= let s0'_typing = star_typing_inversion_l x in
let s1'_typing = star_typing_inversion_r x in
star_typing (f0' s0'_typing) (f1' s1'_typing)
in
ff, gg
| VE_Unit g t ->
let fwd (x:tot_typing g (tm_star tm_emp t) tm_vprop)
: tot_typing g t tm_vprop
= let r = star_typing_inversion_r x in
r
in
let bk (x:tot_typing g t tm_vprop)
: tot_typing g (tm_star tm_emp t) tm_vprop
= star_typing emp_typing x
in
fwd, bk
| VE_Comm g t0 t1 ->
let f t0 t1 (x:tot_typing g (tm_star t0 t1) tm_vprop)
: tot_typing g (tm_star t1 t0) tm_vprop
= let tt0 = star_typing_inversion_l x in
let tt1 = star_typing_inversion_r x in
star_typing tt1 tt0
in
f t0 t1, f t1 t0
| VE_Assoc g t0 t1 t2 ->
let fwd (x:tot_typing g (tm_star t0 (tm_star t1 t2)) tm_vprop)
: tot_typing g (tm_star (tm_star t0 t1) t2) tm_vprop
= let tt0 = star_typing_inversion_l x in
let tt12 = star_typing_inversion_r x in
let tt1 = star_typing_inversion_l tt12 in
let tt2 = star_typing_inversion_r tt12 in
star_typing (star_typing tt0 tt1) tt2
in
let bk (x : tot_typing g (tm_star (tm_star t0 t1) t2) tm_vprop)
: tot_typing g (tm_star t0 (tm_star t1 t2)) tm_vprop
= let tt01 = star_typing_inversion_l x in
let tt2 = star_typing_inversion_r x in
let tt0 = star_typing_inversion_l tt01 in
let tt1 = star_typing_inversion_r tt01 in
star_typing tt0 (star_typing tt1 tt2)
in
fwd, bk
| VE_Ext g t0 t1 token ->
let d1, d2 = vprop_eq_typing_inversion g t0 t1 token in
(fun _ -> d2),
(fun _ -> d1) | val vprop_equiv_typing (#g:_) (#t0 #t1:term) (v:vprop_equiv g t0 t1)
: GTot ((tot_typing g t0 tm_vprop -> tot_typing g t1 tm_vprop) &
(tot_typing g t1 tm_vprop -> tot_typing g t0 tm_vprop))
let rec vprop_equiv_typing (#g: _) (#t0 #t1: term) (v: vprop_equiv g t0 t1)
: GTot
((tot_typing g t0 tm_vprop -> tot_typing g t1 tm_vprop) &
(tot_typing g t1 tm_vprop -> tot_typing g t0 tm_vprop)) (decreases v) = | false | null | false | match v with
| VE_Refl _ _ -> (fun x -> x), (fun x -> x)
| VE_Sym _ _ _ v' ->
let f, g = vprop_equiv_typing v' in
g, f
| VE_Trans g t0 t2 t1 v02 v21 ->
let f02, f20 = vprop_equiv_typing v02 in
let f21, f12 = vprop_equiv_typing v21 in
(fun x -> f21 (f02 x)), (fun x -> f20 (f12 x))
| VE_Ctxt g s0 s1 s0' s1' v0 v1 ->
let f0, f0' = vprop_equiv_typing v0 in
let f1, f1' = vprop_equiv_typing v1 in
let ff (x: tot_typing g (tm_star s0 s1) tm_vprop) : tot_typing g (tm_star s0' s1') tm_vprop =
let s0_typing = star_typing_inversion_l x in
let s1_typing = star_typing_inversion_r x in
let s0'_typing, s1'_typing = f0 s0_typing, f1 s1_typing in
star_typing s0'_typing s1'_typing
in
let gg (x: tot_typing g (tm_star s0' s1') tm_vprop) : tot_typing g (tm_star s0 s1) tm_vprop =
let s0'_typing = star_typing_inversion_l x in
let s1'_typing = star_typing_inversion_r x in
star_typing (f0' s0'_typing) (f1' s1'_typing)
in
ff, gg
| VE_Unit g t ->
let fwd (x: tot_typing g (tm_star tm_emp t) tm_vprop) : tot_typing g t tm_vprop =
let r = star_typing_inversion_r x in
r
in
let bk (x: tot_typing g t tm_vprop) : tot_typing g (tm_star tm_emp t) tm_vprop =
star_typing emp_typing x
in
fwd, bk
| VE_Comm g t0 t1 ->
let f t0 t1 (x: tot_typing g (tm_star t0 t1) tm_vprop) : tot_typing g (tm_star t1 t0) tm_vprop =
let tt0 = star_typing_inversion_l x in
let tt1 = star_typing_inversion_r x in
star_typing tt1 tt0
in
f t0 t1, f t1 t0
| VE_Assoc g t0 t1 t2 ->
let fwd (x: tot_typing g (tm_star t0 (tm_star t1 t2)) tm_vprop)
: tot_typing g (tm_star (tm_star t0 t1) t2) tm_vprop =
let tt0 = star_typing_inversion_l x in
let tt12 = star_typing_inversion_r x in
let tt1 = star_typing_inversion_l tt12 in
let tt2 = star_typing_inversion_r tt12 in
star_typing (star_typing tt0 tt1) tt2
in
let bk (x: tot_typing g (tm_star (tm_star t0 t1) t2) tm_vprop)
: tot_typing g (tm_star t0 (tm_star t1 t2)) tm_vprop =
let tt01 = star_typing_inversion_l x in
let tt2 = star_typing_inversion_r x in
let tt0 = star_typing_inversion_l tt01 in
let tt1 = star_typing_inversion_r tt01 in
star_typing tt0 (star_typing tt1 tt2)
in
fwd, bk
| VE_Ext g t0 t1 token ->
let d1, d2 = vprop_eq_typing_inversion g t0 t1 token in
(fun _ -> d2), (fun _ -> d1) | {
"checked_file": "Pulse.Typing.Combinators.fst.checked",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.Printer.fsti.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": true,
"source_file": "Pulse.Typing.Combinators.fst"
} | [
"",
"sometrivial"
] | [
"Pulse.Typing.Env.env",
"Pulse.Syntax.Base.term",
"Pulse.Typing.vprop_equiv",
"FStar.Pervasives.Native.Mktuple2",
"Pulse.Typing.tot_typing",
"Pulse.Syntax.Base.tm_vprop",
"FStar.Pervasives.Native.tuple2",
"Pulse.Typing.Combinators.vprop_equiv_typing",
"Pulse.Syntax.Base.tm_star",
"Pulse.Typing.star_typing",
"Pulse.Typing.star_typing_inversion_r",
"Pulse.Typing.star_typing_inversion_l",
"Pulse.Syntax.Base.tm_emp",
"Pulse.Typing.emp_typing",
"Pulse.Syntax.Base.vprop",
"FStar.Tactics.Types.equiv_token",
"Pulse.Typing.elab_env",
"Pulse.Elaborate.Pure.elab_term",
"Pulse.Typing.vprop_eq_typing_inversion"
] | [] | module Pulse.Typing.Combinators
module RT = FStar.Reflection.Typing
module R = FStar.Reflection.V2
module L = FStar.List.Tot
module T = FStar.Tactics.V2
module P = Pulse.Syntax.Printer
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing
open Pulse.Checker.Pure
let rec vprop_equiv_typing (#g:_) (#t0 #t1:term) (v:vprop_equiv g t0 t1)
: GTot ((tot_typing g t0 tm_vprop -> tot_typing g t1 tm_vprop) &
(tot_typing g t1 tm_vprop -> tot_typing g t0 tm_vprop)) | false | false | Pulse.Typing.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val vprop_equiv_typing (#g:_) (#t0 #t1:term) (v:vprop_equiv g t0 t1)
: GTot ((tot_typing g t0 tm_vprop -> tot_typing g t1 tm_vprop) &
(tot_typing g t1 tm_vprop -> tot_typing g t0 tm_vprop)) | [
"recursion"
] | Pulse.Typing.Combinators.vprop_equiv_typing | {
"file_name": "lib/steel/pulse/Pulse.Typing.Combinators.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | v: Pulse.Typing.vprop_equiv g t0 t1
-> Prims.GTot
((_: Pulse.Typing.tot_typing g t0 Pulse.Syntax.Base.tm_vprop
-> Pulse.Typing.tot_typing g t1 Pulse.Syntax.Base.tm_vprop) *
(_: Pulse.Typing.tot_typing g t1 Pulse.Syntax.Base.tm_vprop
-> Pulse.Typing.tot_typing g t0 Pulse.Syntax.Base.tm_vprop)) | {
"end_col": 19,
"end_line": 92,
"start_col": 4,
"start_line": 18
} |
Prims.Tot | val apply_frame (#g:env)
(#t:st_term)
(#ctxt:term)
(ctxt_typing: tot_typing g ctxt tm_vprop)
(#c:comp { stateful_comp c })
(t_typing: st_typing g t c)
(frame_t:frame_for_req_in_ctxt g ctxt (comp_pre c))
: Tot (c':comp_st { comp_pre c' == ctxt /\
comp_res c' == comp_res c /\
comp_u c' == comp_u c /\
comp_post c' == tm_star (comp_post c) (frame_of frame_t) } &
st_typing g t c') | [
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Syntax.Printer",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let apply_frame (#g:env)
(#t:st_term)
(#ctxt:term)
(ctxt_typing: tot_typing g ctxt tm_vprop)
(#c:comp { stateful_comp c })
(t_typing: st_typing g t c)
(frame_t:frame_for_req_in_ctxt g ctxt (comp_pre c))
: Tot (c':comp_st { comp_pre c' == ctxt /\
comp_res c' == comp_res c /\
comp_u c' == comp_u c /\
comp_post c' == tm_star (comp_post c) (frame_of frame_t) } &
st_typing g t c')
= let s = st_comp_of_comp c in
let (| frame, frame_typing, ve |) = frame_t in
let t_typing
: st_typing g t (Pulse.Typing.add_frame c frame)
= T_Frame g t c frame frame_typing t_typing in
let c' = Pulse.Typing.add_frame c frame in
let c'_typing = Metatheory.st_typing_correctness t_typing in
let s' = st_comp_of_comp c' in
let ve: vprop_equiv g s'.pre ctxt = ve in
let s'' = { s' with pre = ctxt } in
let c'' = c' `with_st_comp` s'' in
assert (comp_post c' == comp_post c'');
let ve: vprop_equiv g (comp_pre c') (comp_pre c'') = ve in
let st_typing = Metatheory.comp_typing_inversion c'_typing in
let (| res_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_typing in
let st_equiv = ST_VPropEquiv g c' c'' x pre_typing res_typing post_typing (RT.Rel_refl _ _ _) ve (VE_Refl _ _) in
let t_typing = T_Equiv _ _ _ _ t_typing st_equiv in
(| c'', t_typing |) | val apply_frame (#g:env)
(#t:st_term)
(#ctxt:term)
(ctxt_typing: tot_typing g ctxt tm_vprop)
(#c:comp { stateful_comp c })
(t_typing: st_typing g t c)
(frame_t:frame_for_req_in_ctxt g ctxt (comp_pre c))
: Tot (c':comp_st { comp_pre c' == ctxt /\
comp_res c' == comp_res c /\
comp_u c' == comp_u c /\
comp_post c' == tm_star (comp_post c) (frame_of frame_t) } &
st_typing g t c')
let apply_frame
(#g: env)
(#t: st_term)
(#ctxt: term)
(ctxt_typing: tot_typing g ctxt tm_vprop)
(#c: comp{stateful_comp c})
(t_typing: st_typing g t c)
(frame_t: frame_for_req_in_ctxt g ctxt (comp_pre c))
: Tot
(c':
comp_st
{ comp_pre c' == ctxt /\ comp_res c' == comp_res c /\ comp_u c' == comp_u c /\
comp_post c' == tm_star (comp_post c) (frame_of frame_t) } &
st_typing g t c') = | false | null | false | let s = st_comp_of_comp c in
let (| frame , frame_typing , ve |) = frame_t in
let t_typing:st_typing g t (Pulse.Typing.add_frame c frame) =
T_Frame g t c frame frame_typing t_typing
in
let c' = Pulse.Typing.add_frame c frame in
let c'_typing = Metatheory.st_typing_correctness t_typing in
let s' = st_comp_of_comp c' in
let ve:vprop_equiv g s'.pre ctxt = ve in
let s'' = { s' with pre = ctxt } in
let c'' = c' `with_st_comp` s'' in
assert (comp_post c' == comp_post c'');
let ve:vprop_equiv g (comp_pre c') (comp_pre c'') = ve in
let st_typing = Metatheory.comp_typing_inversion c'_typing in
let (| res_typing , pre_typing , x , post_typing |) =
Metatheory.st_comp_typing_inversion st_typing
in
let st_equiv =
ST_VPropEquiv g c' c'' x pre_typing res_typing post_typing (RT.Rel_refl _ _ _) ve (VE_Refl _ _)
in
let t_typing = T_Equiv _ _ _ _ t_typing st_equiv in
(| c'', t_typing |) | {
"checked_file": "Pulse.Typing.Combinators.fst.checked",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.Printer.fsti.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": true,
"source_file": "Pulse.Typing.Combinators.fst"
} | [
"total"
] | [
"Pulse.Typing.Env.env",
"Pulse.Syntax.Base.st_term",
"Pulse.Syntax.Base.term",
"Pulse.Typing.tot_typing",
"Pulse.Syntax.Base.tm_vprop",
"Pulse.Syntax.Base.comp",
"Prims.b2t",
"Pulse.Syntax.Base.stateful_comp",
"Pulse.Typing.st_typing",
"Pulse.Typing.Combinators.frame_for_req_in_ctxt",
"Pulse.Syntax.Base.comp_pre",
"Pulse.Typing.vprop_equiv",
"Pulse.Syntax.Base.tm_star",
"Pulse.Typing.universe_of",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__res",
"Pulse.Syntax.Base.st_comp_of_comp",
"Pulse.Typing.add_frame",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__u",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__pre",
"Pulse.Syntax.Base.var",
"Pulse.Typing.fresh_wrt",
"Pulse.Syntax.Naming.freevars",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__post",
"Pulse.Typing.Env.push_binding",
"Pulse.Syntax.Base.ppname_default",
"Pulse.Syntax.Naming.open_term",
"Prims.Mkdtuple2",
"Pulse.Syntax.Base.comp_st",
"Prims.l_and",
"Prims.eq2",
"Pulse.Syntax.Base.comp_res",
"Pulse.Syntax.Base.universe",
"Pulse.Syntax.Base.comp_u",
"Pulse.Syntax.Base.comp_post",
"Pulse.Typing.Combinators.frame_of",
"Pulse.Typing.T_Equiv",
"Pulse.Typing.st_equiv",
"Pulse.Typing.ST_VPropEquiv",
"FStar.Reflection.Typing.Rel_refl",
"Pulse.Typing.elab_env",
"Pulse.Elaborate.Pure.elab_term",
"FStar.Reflection.Typing.R_Eq",
"Pulse.Typing.VE_Refl",
"Prims.dtuple2",
"FStar.Pervasives.dtuple4",
"Pulse.Typing.Metatheory.Base.st_comp_typing_inversion",
"Pulse.Typing.st_comp_typing",
"Pulse.Typing.Metatheory.Base.comp_typing_inversion",
"Prims.unit",
"Prims._assert",
"Pulse.Syntax.Base.vprop",
"Pulse.Syntax.Base.with_st_comp",
"Pulse.Syntax.Base.st_comp",
"Pulse.Syntax.Base.Mkst_comp",
"Pulse.Typing.Metatheory.Base.comp_typing_u",
"Pulse.Typing.Metatheory.Base.st_typing_correctness",
"Pulse.Typing.T_Frame"
] | [] | module Pulse.Typing.Combinators
module RT = FStar.Reflection.Typing
module R = FStar.Reflection.V2
module L = FStar.List.Tot
module T = FStar.Tactics.V2
module P = Pulse.Syntax.Printer
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing
open Pulse.Checker.Pure
let rec vprop_equiv_typing (#g:_) (#t0 #t1:term) (v:vprop_equiv g t0 t1)
: GTot ((tot_typing g t0 tm_vprop -> tot_typing g t1 tm_vprop) &
(tot_typing g t1 tm_vprop -> tot_typing g t0 tm_vprop))
(decreases v)
= match v with
| VE_Refl _ _ -> (fun x -> x), (fun x -> x)
| VE_Sym _ _ _ v' ->
let f, g = vprop_equiv_typing v' in
g, f
| VE_Trans g t0 t2 t1 v02 v21 ->
let f02, f20 = vprop_equiv_typing v02 in
let f21, f12 = vprop_equiv_typing v21 in
(fun x -> f21 (f02 x)),
(fun x -> f20 (f12 x))
| VE_Ctxt g s0 s1 s0' s1' v0 v1 ->
let f0, f0' = vprop_equiv_typing v0 in
let f1, f1' = vprop_equiv_typing v1 in
let ff (x:tot_typing g (tm_star s0 s1) tm_vprop)
: tot_typing g (tm_star s0' s1') tm_vprop
= let s0_typing = star_typing_inversion_l x in
let s1_typing = star_typing_inversion_r x in
let s0'_typing, s1'_typing = f0 s0_typing, f1 s1_typing in
star_typing s0'_typing s1'_typing
in
let gg (x:tot_typing g (tm_star s0' s1') tm_vprop)
: tot_typing g (tm_star s0 s1) tm_vprop
= let s0'_typing = star_typing_inversion_l x in
let s1'_typing = star_typing_inversion_r x in
star_typing (f0' s0'_typing) (f1' s1'_typing)
in
ff, gg
| VE_Unit g t ->
let fwd (x:tot_typing g (tm_star tm_emp t) tm_vprop)
: tot_typing g t tm_vprop
= let r = star_typing_inversion_r x in
r
in
let bk (x:tot_typing g t tm_vprop)
: tot_typing g (tm_star tm_emp t) tm_vprop
= star_typing emp_typing x
in
fwd, bk
| VE_Comm g t0 t1 ->
let f t0 t1 (x:tot_typing g (tm_star t0 t1) tm_vprop)
: tot_typing g (tm_star t1 t0) tm_vprop
= let tt0 = star_typing_inversion_l x in
let tt1 = star_typing_inversion_r x in
star_typing tt1 tt0
in
f t0 t1, f t1 t0
| VE_Assoc g t0 t1 t2 ->
let fwd (x:tot_typing g (tm_star t0 (tm_star t1 t2)) tm_vprop)
: tot_typing g (tm_star (tm_star t0 t1) t2) tm_vprop
= let tt0 = star_typing_inversion_l x in
let tt12 = star_typing_inversion_r x in
let tt1 = star_typing_inversion_l tt12 in
let tt2 = star_typing_inversion_r tt12 in
star_typing (star_typing tt0 tt1) tt2
in
let bk (x : tot_typing g (tm_star (tm_star t0 t1) t2) tm_vprop)
: tot_typing g (tm_star t0 (tm_star t1 t2)) tm_vprop
= let tt01 = star_typing_inversion_l x in
let tt2 = star_typing_inversion_r x in
let tt0 = star_typing_inversion_l tt01 in
let tt1 = star_typing_inversion_r tt01 in
star_typing tt0 (star_typing tt1 tt2)
in
fwd, bk
| VE_Ext g t0 t1 token ->
let d1, d2 = vprop_eq_typing_inversion g t0 t1 token in
(fun _ -> d2),
(fun _ -> d1)
#push-options "--z3rlimit_factor 8 --ifuel 1 --fuel 2 --query_stats"
let rec mk_bind (g:env)
(pre:term)
(e1:st_term)
(e2:st_term)
(c1:comp_st)
(c2:comp_st)
(px:nvar { ~ (Set.mem (snd px) (dom g)) })
(d_e1:st_typing g e1 c1)
(d_c1res:tot_typing g (comp_res c1) (tm_type (comp_u c1)))
(d_e2:st_typing (push_binding g (snd px) (fst px) (comp_res c1)) (open_st_term_nv e2 px) c2)
(res_typing:universe_of g (comp_res c2) (comp_u c2))
(post_typing:tot_typing (push_binding g (snd px) (fst px) (comp_res c2))
(open_term_nv (comp_post c2) px)
tm_vprop)
: T.TacH (t:st_term &
c:comp_st { st_comp_of_comp c == st_comp_with_pre (st_comp_of_comp c2) pre } &
st_typing g t c)
(requires fun _ ->
let _, x = px in
comp_pre c1 == pre /\
None? (lookup g x) /\
(~(x `Set.mem` freevars_st e2)) /\
open_term (comp_post c1) x == comp_pre c2 /\
(~ (x `Set.mem` freevars (comp_post c2))))
(ensures fun _ _ -> True) =
let _, x = px in
let b = nvar_as_binder px (comp_res c1) in
match c1, c2 with
| C_ST _, C_ST _ ->
let bc = Bind_comp g x c1 c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
| C_STGhost inames1 _, C_STGhost inames2 _ ->
if eq_tm inames1 inames2
then begin
let bc = Bind_comp g x c1 c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose two stghost computations with different opened invariants"
| C_STAtomic inames _, C_ST _ ->
if eq_tm inames tm_emp_inames
then begin
let c1lifted = C_ST (st_comp_of_comp c1) in
let d_e1 : st_typing g e1 c1lifted =
T_Lift _ _ _ c1lifted d_e1 (Lift_STAtomic_ST _ c1) in
let bc = Bind_comp g x c1lifted c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose atomic with non-emp opened invariants with stt"
| C_STGhost inames1 _, C_STAtomic inames2 _ ->
if eq_tm inames1 inames2
then begin
let w = get_non_informative_witness g (comp_u c1) (comp_res c1) in
let bc = Bind_comp_ghost_l g x c1 c2 w res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose ghost and atomic with different opened invariants"
| C_STAtomic inames1 _, C_STGhost inames2 _ ->
if eq_tm inames1 inames2
then begin
let w = get_non_informative_witness g (comp_u c2) (comp_res c2) in
let bc = Bind_comp_ghost_r g x c1 c2 w res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose atomic and ghost with different opened invariants"
| C_ST _, C_STAtomic inames _ ->
if eq_tm inames tm_emp_inames
then begin
let c2lifted = C_ST (st_comp_of_comp c2) in
let g' = push_binding g x (fst px) (comp_res c1) in
let d_e2 : st_typing g' (open_st_term_nv e2 px) c2lifted =
T_Lift _ _ _ c2lifted d_e2 (Lift_STAtomic_ST _ c2) in
let bc = Bind_comp g x c1 c2lifted res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose stt with atomic with non-emp opened invariants"
| C_STGhost inames _, C_ST _ ->
if eq_tm inames tm_emp_inames
then begin
let w = get_non_informative_witness g (comp_u c1) (comp_res c1) in
let c1lifted = C_STAtomic inames (st_comp_of_comp c1) in
let d_e1 : st_typing g e1 c1lifted =
T_Lift _ _ _ c1lifted d_e1 (Lift_STGhost_STAtomic g c1 w) in
mk_bind g pre e1 e2 c1lifted c2 px d_e1 d_c1res d_e2 res_typing post_typing
end
else fail g None "Cannot compose ghost with stt with non-emp opened invariants"
| C_ST _, C_STGhost inames _ ->
if eq_tm inames tm_emp_inames
then begin
let g' = push_binding g x (fst px) (comp_res c1) in
let w = get_non_informative_witness g' (comp_u c2) (comp_res c2) in
let c2lifted = C_STAtomic inames (st_comp_of_comp c2) in
let d_e2 : st_typing g' (open_st_term_nv e2 px) c2lifted =
T_Lift _ _ _ c2lifted d_e2 (Lift_STGhost_STAtomic g' c2 w) in
let (| t, c, d |) = mk_bind g pre e1 e2 c1 c2lifted px d_e1 d_c1res d_e2 res_typing post_typing in
(| t, c, d |)
end
else fail g None "Cannot compose stt with ghost with non-emp opened invariants"
| C_STAtomic inames _, C_STAtomic _ _ ->
if eq_tm inames tm_emp_inames
then begin
let c1lifted = C_ST (st_comp_of_comp c1) in
let d_e1 : st_typing g e1 c1lifted =
T_Lift _ _ _ c1lifted d_e1 (Lift_STAtomic_ST _ c1) in
mk_bind g pre e1 e2 c1lifted c2 px d_e1 d_c1res d_e2 res_typing post_typing
end
else fail g None "Cannot compose statomics with non-emp opened invariants"
| _, _ -> fail g None "bind either not implemented (e.g. ghost) or not possible"
#pop-options
let bind_res_and_post_typing (g:env) (s2:st_comp) (x:var { fresh_wrt x g (freevars s2.post) })
(post_hint:post_hint_opt g { comp_post_matches_hint (C_ST s2) post_hint })
: T.Tac (universe_of g s2.res s2.u &
tot_typing (push_binding g x ppname_default s2.res) (open_term_nv s2.post (v_as_nv x)) tm_vprop)
= match post_hint with
| None ->
(* We're inferring a post, so these checks are unavoidable *)
(* since we need to type the result in a smaller env g *)
let (| u, res_typing |) = check_universe g s2.res in
if not (eq_univ u s2.u)
then fail g None "Unexpected universe for result type"
else if x `Set.mem` freevars s2.post
then fail g None (Printf.sprintf "Bound variable %d escapes scope in postcondition %s" x (P.term_to_string s2.post))
else (
let y = x in //fresh g in
let s2_post_opened = open_term_nv s2.post (v_as_nv y) in
let post_typing =
check_vprop_with_core (push_binding g y ppname_default s2.res) s2_post_opened in
res_typing, post_typing
)
| Some post ->
if x `Set.mem` freevars s2.post
then fail g None "Unexpected mismatched postcondition in bind" //exclude with a stronger type on check'
else (
let pr = post_hint_typing g post x in
pr.ty_typing, pr.post_typing
)
let add_frame (#g:env) (#t:st_term) (#c:comp_st) (t_typing:st_typing g t c)
(#frame:vprop)
(frame_typing:tot_typing g frame tm_vprop)
: t':st_term &
c':comp_st { c' == add_frame c frame } &
st_typing g t' c' =
(| t, add_frame c frame, T_Frame _ _ _ _ frame_typing t_typing |)
let apply_frame (#g:env)
(#t:st_term)
(#ctxt:term)
(ctxt_typing: tot_typing g ctxt tm_vprop)
(#c:comp { stateful_comp c })
(t_typing: st_typing g t c)
(frame_t:frame_for_req_in_ctxt g ctxt (comp_pre c))
: Tot (c':comp_st { comp_pre c' == ctxt /\
comp_res c' == comp_res c /\
comp_u c' == comp_u c /\
comp_post c' == tm_star (comp_post c) (frame_of frame_t) } & | false | false | Pulse.Typing.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val apply_frame (#g:env)
(#t:st_term)
(#ctxt:term)
(ctxt_typing: tot_typing g ctxt tm_vprop)
(#c:comp { stateful_comp c })
(t_typing: st_typing g t c)
(frame_t:frame_for_req_in_ctxt g ctxt (comp_pre c))
: Tot (c':comp_st { comp_pre c' == ctxt /\
comp_res c' == comp_res c /\
comp_u c' == comp_u c /\
comp_post c' == tm_star (comp_post c) (frame_of frame_t) } &
st_typing g t c') | [] | Pulse.Typing.Combinators.apply_frame | {
"file_name": "lib/steel/pulse/Pulse.Typing.Combinators.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
ctxt_typing: Pulse.Typing.tot_typing g ctxt Pulse.Syntax.Base.tm_vprop ->
t_typing: Pulse.Typing.st_typing g t c ->
frame_t: Pulse.Typing.Combinators.frame_for_req_in_ctxt g ctxt (Pulse.Syntax.Base.comp_pre c)
-> Prims.dtuple2 (c':
Pulse.Syntax.Base.comp_st
{ Pulse.Syntax.Base.comp_pre c' == ctxt /\
Pulse.Syntax.Base.comp_res c' == Pulse.Syntax.Base.comp_res c /\
Pulse.Syntax.Base.comp_u c' == Pulse.Syntax.Base.comp_u c /\
Pulse.Syntax.Base.comp_post c' ==
Pulse.Syntax.Base.tm_star (Pulse.Syntax.Base.comp_post c)
(Pulse.Typing.Combinators.frame_of frame_t) })
(fun c' -> Pulse.Typing.st_typing g t c') | {
"end_col": 23,
"end_line": 272,
"start_col": 3,
"start_line": 255
} |
FStar.Tactics.Effect.Tac | val bind_res_and_post_typing (g:env) (s2:st_comp) (x:var { fresh_wrt x g (freevars s2.post) })
(post_hint:post_hint_opt g { comp_post_matches_hint (C_ST s2) post_hint })
: T.Tac (universe_of g s2.res s2.u &
tot_typing (push_binding g x ppname_default s2.res) (open_term_nv s2.post (v_as_nv x)) tm_vprop) | [
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Syntax.Printer",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let bind_res_and_post_typing (g:env) (s2:st_comp) (x:var { fresh_wrt x g (freevars s2.post) })
(post_hint:post_hint_opt g { comp_post_matches_hint (C_ST s2) post_hint })
: T.Tac (universe_of g s2.res s2.u &
tot_typing (push_binding g x ppname_default s2.res) (open_term_nv s2.post (v_as_nv x)) tm_vprop)
= match post_hint with
| None ->
(* We're inferring a post, so these checks are unavoidable *)
(* since we need to type the result in a smaller env g *)
let (| u, res_typing |) = check_universe g s2.res in
if not (eq_univ u s2.u)
then fail g None "Unexpected universe for result type"
else if x `Set.mem` freevars s2.post
then fail g None (Printf.sprintf "Bound variable %d escapes scope in postcondition %s" x (P.term_to_string s2.post))
else (
let y = x in //fresh g in
let s2_post_opened = open_term_nv s2.post (v_as_nv y) in
let post_typing =
check_vprop_with_core (push_binding g y ppname_default s2.res) s2_post_opened in
res_typing, post_typing
)
| Some post ->
if x `Set.mem` freevars s2.post
then fail g None "Unexpected mismatched postcondition in bind" //exclude with a stronger type on check'
else (
let pr = post_hint_typing g post x in
pr.ty_typing, pr.post_typing
) | val bind_res_and_post_typing (g:env) (s2:st_comp) (x:var { fresh_wrt x g (freevars s2.post) })
(post_hint:post_hint_opt g { comp_post_matches_hint (C_ST s2) post_hint })
: T.Tac (universe_of g s2.res s2.u &
tot_typing (push_binding g x ppname_default s2.res) (open_term_nv s2.post (v_as_nv x)) tm_vprop)
let bind_res_and_post_typing
(g: env)
(s2: st_comp)
(x: var{fresh_wrt x g (freevars s2.post)})
(post_hint: post_hint_opt g {comp_post_matches_hint (C_ST s2) post_hint})
: T.Tac
(universe_of g s2.res s2.u &
tot_typing (push_binding g x ppname_default s2.res)
(open_term_nv s2.post (v_as_nv x))
tm_vprop) = | true | null | false | match post_hint with
| None ->
let (| u , res_typing |) = check_universe g s2.res in
if not (eq_univ u s2.u)
then fail g None "Unexpected universe for result type"
else
if x `Set.mem` (freevars s2.post)
then
fail g
None
(Printf.sprintf "Bound variable %d escapes scope in postcondition %s"
x
(P.term_to_string s2.post))
else
(let y = x in
let s2_post_opened = open_term_nv s2.post (v_as_nv y) in
let post_typing =
check_vprop_with_core (push_binding g y ppname_default s2.res) s2_post_opened
in
res_typing, post_typing)
| Some post ->
if x `Set.mem` (freevars s2.post)
then fail g None "Unexpected mismatched postcondition in bind"
else
(let pr = post_hint_typing g post x in
pr.ty_typing, pr.post_typing) | {
"checked_file": "Pulse.Typing.Combinators.fst.checked",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.Printer.fsti.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": true,
"source_file": "Pulse.Typing.Combinators.fst"
} | [] | [
"Pulse.Typing.Env.env",
"Pulse.Syntax.Base.st_comp",
"Pulse.Syntax.Base.var",
"Pulse.Typing.fresh_wrt",
"Pulse.Syntax.Naming.freevars",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__post",
"Pulse.Typing.post_hint_opt",
"Pulse.Typing.comp_post_matches_hint",
"Pulse.Syntax.Base.C_ST",
"Pulse.Syntax.Base.universe",
"Pulse.Typing.universe_of",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__res",
"Prims.op_Negation",
"Pulse.Syntax.Base.eq_univ",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__u",
"Pulse.Typing.Env.fail",
"FStar.Pervasives.Native.tuple2",
"Pulse.Typing.tot_typing",
"Pulse.Typing.Env.push_binding",
"Pulse.Syntax.Base.ppname_default",
"Pulse.Syntax.Naming.open_term_nv",
"Pulse.Syntax.Base.v_as_nv",
"Pulse.Syntax.Base.tm_vprop",
"FStar.Pervasives.Native.None",
"Pulse.Syntax.Base.range",
"Prims.bool",
"FStar.Set.mem",
"Prims.string",
"FStar.Printf.sprintf",
"Pulse.Syntax.Printer.term_to_string",
"FStar.Pervasives.Native.Mktuple2",
"Pulse.Checker.Pure.check_vprop_with_core",
"Pulse.Syntax.Base.term",
"Prims.dtuple2",
"Pulse.Checker.Pure.check_universe",
"Pulse.Typing.post_hint_t",
"Pulse.Typing.__proj__Mkpost_hint_typing_t__item__ty_typing",
"Pulse.Typing.__proj__Mkpost_hint_typing_t__item__post_typing",
"Pulse.Typing.post_hint_typing_t",
"Pulse.Typing.post_hint_typing"
] | [] | module Pulse.Typing.Combinators
module RT = FStar.Reflection.Typing
module R = FStar.Reflection.V2
module L = FStar.List.Tot
module T = FStar.Tactics.V2
module P = Pulse.Syntax.Printer
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing
open Pulse.Checker.Pure
let rec vprop_equiv_typing (#g:_) (#t0 #t1:term) (v:vprop_equiv g t0 t1)
: GTot ((tot_typing g t0 tm_vprop -> tot_typing g t1 tm_vprop) &
(tot_typing g t1 tm_vprop -> tot_typing g t0 tm_vprop))
(decreases v)
= match v with
| VE_Refl _ _ -> (fun x -> x), (fun x -> x)
| VE_Sym _ _ _ v' ->
let f, g = vprop_equiv_typing v' in
g, f
| VE_Trans g t0 t2 t1 v02 v21 ->
let f02, f20 = vprop_equiv_typing v02 in
let f21, f12 = vprop_equiv_typing v21 in
(fun x -> f21 (f02 x)),
(fun x -> f20 (f12 x))
| VE_Ctxt g s0 s1 s0' s1' v0 v1 ->
let f0, f0' = vprop_equiv_typing v0 in
let f1, f1' = vprop_equiv_typing v1 in
let ff (x:tot_typing g (tm_star s0 s1) tm_vprop)
: tot_typing g (tm_star s0' s1') tm_vprop
= let s0_typing = star_typing_inversion_l x in
let s1_typing = star_typing_inversion_r x in
let s0'_typing, s1'_typing = f0 s0_typing, f1 s1_typing in
star_typing s0'_typing s1'_typing
in
let gg (x:tot_typing g (tm_star s0' s1') tm_vprop)
: tot_typing g (tm_star s0 s1) tm_vprop
= let s0'_typing = star_typing_inversion_l x in
let s1'_typing = star_typing_inversion_r x in
star_typing (f0' s0'_typing) (f1' s1'_typing)
in
ff, gg
| VE_Unit g t ->
let fwd (x:tot_typing g (tm_star tm_emp t) tm_vprop)
: tot_typing g t tm_vprop
= let r = star_typing_inversion_r x in
r
in
let bk (x:tot_typing g t tm_vprop)
: tot_typing g (tm_star tm_emp t) tm_vprop
= star_typing emp_typing x
in
fwd, bk
| VE_Comm g t0 t1 ->
let f t0 t1 (x:tot_typing g (tm_star t0 t1) tm_vprop)
: tot_typing g (tm_star t1 t0) tm_vprop
= let tt0 = star_typing_inversion_l x in
let tt1 = star_typing_inversion_r x in
star_typing tt1 tt0
in
f t0 t1, f t1 t0
| VE_Assoc g t0 t1 t2 ->
let fwd (x:tot_typing g (tm_star t0 (tm_star t1 t2)) tm_vprop)
: tot_typing g (tm_star (tm_star t0 t1) t2) tm_vprop
= let tt0 = star_typing_inversion_l x in
let tt12 = star_typing_inversion_r x in
let tt1 = star_typing_inversion_l tt12 in
let tt2 = star_typing_inversion_r tt12 in
star_typing (star_typing tt0 tt1) tt2
in
let bk (x : tot_typing g (tm_star (tm_star t0 t1) t2) tm_vprop)
: tot_typing g (tm_star t0 (tm_star t1 t2)) tm_vprop
= let tt01 = star_typing_inversion_l x in
let tt2 = star_typing_inversion_r x in
let tt0 = star_typing_inversion_l tt01 in
let tt1 = star_typing_inversion_r tt01 in
star_typing tt0 (star_typing tt1 tt2)
in
fwd, bk
| VE_Ext g t0 t1 token ->
let d1, d2 = vprop_eq_typing_inversion g t0 t1 token in
(fun _ -> d2),
(fun _ -> d1)
#push-options "--z3rlimit_factor 8 --ifuel 1 --fuel 2 --query_stats"
let rec mk_bind (g:env)
(pre:term)
(e1:st_term)
(e2:st_term)
(c1:comp_st)
(c2:comp_st)
(px:nvar { ~ (Set.mem (snd px) (dom g)) })
(d_e1:st_typing g e1 c1)
(d_c1res:tot_typing g (comp_res c1) (tm_type (comp_u c1)))
(d_e2:st_typing (push_binding g (snd px) (fst px) (comp_res c1)) (open_st_term_nv e2 px) c2)
(res_typing:universe_of g (comp_res c2) (comp_u c2))
(post_typing:tot_typing (push_binding g (snd px) (fst px) (comp_res c2))
(open_term_nv (comp_post c2) px)
tm_vprop)
: T.TacH (t:st_term &
c:comp_st { st_comp_of_comp c == st_comp_with_pre (st_comp_of_comp c2) pre } &
st_typing g t c)
(requires fun _ ->
let _, x = px in
comp_pre c1 == pre /\
None? (lookup g x) /\
(~(x `Set.mem` freevars_st e2)) /\
open_term (comp_post c1) x == comp_pre c2 /\
(~ (x `Set.mem` freevars (comp_post c2))))
(ensures fun _ _ -> True) =
let _, x = px in
let b = nvar_as_binder px (comp_res c1) in
match c1, c2 with
| C_ST _, C_ST _ ->
let bc = Bind_comp g x c1 c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
| C_STGhost inames1 _, C_STGhost inames2 _ ->
if eq_tm inames1 inames2
then begin
let bc = Bind_comp g x c1 c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose two stghost computations with different opened invariants"
| C_STAtomic inames _, C_ST _ ->
if eq_tm inames tm_emp_inames
then begin
let c1lifted = C_ST (st_comp_of_comp c1) in
let d_e1 : st_typing g e1 c1lifted =
T_Lift _ _ _ c1lifted d_e1 (Lift_STAtomic_ST _ c1) in
let bc = Bind_comp g x c1lifted c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose atomic with non-emp opened invariants with stt"
| C_STGhost inames1 _, C_STAtomic inames2 _ ->
if eq_tm inames1 inames2
then begin
let w = get_non_informative_witness g (comp_u c1) (comp_res c1) in
let bc = Bind_comp_ghost_l g x c1 c2 w res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose ghost and atomic with different opened invariants"
| C_STAtomic inames1 _, C_STGhost inames2 _ ->
if eq_tm inames1 inames2
then begin
let w = get_non_informative_witness g (comp_u c2) (comp_res c2) in
let bc = Bind_comp_ghost_r g x c1 c2 w res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose atomic and ghost with different opened invariants"
| C_ST _, C_STAtomic inames _ ->
if eq_tm inames tm_emp_inames
then begin
let c2lifted = C_ST (st_comp_of_comp c2) in
let g' = push_binding g x (fst px) (comp_res c1) in
let d_e2 : st_typing g' (open_st_term_nv e2 px) c2lifted =
T_Lift _ _ _ c2lifted d_e2 (Lift_STAtomic_ST _ c2) in
let bc = Bind_comp g x c1 c2lifted res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose stt with atomic with non-emp opened invariants"
| C_STGhost inames _, C_ST _ ->
if eq_tm inames tm_emp_inames
then begin
let w = get_non_informative_witness g (comp_u c1) (comp_res c1) in
let c1lifted = C_STAtomic inames (st_comp_of_comp c1) in
let d_e1 : st_typing g e1 c1lifted =
T_Lift _ _ _ c1lifted d_e1 (Lift_STGhost_STAtomic g c1 w) in
mk_bind g pre e1 e2 c1lifted c2 px d_e1 d_c1res d_e2 res_typing post_typing
end
else fail g None "Cannot compose ghost with stt with non-emp opened invariants"
| C_ST _, C_STGhost inames _ ->
if eq_tm inames tm_emp_inames
then begin
let g' = push_binding g x (fst px) (comp_res c1) in
let w = get_non_informative_witness g' (comp_u c2) (comp_res c2) in
let c2lifted = C_STAtomic inames (st_comp_of_comp c2) in
let d_e2 : st_typing g' (open_st_term_nv e2 px) c2lifted =
T_Lift _ _ _ c2lifted d_e2 (Lift_STGhost_STAtomic g' c2 w) in
let (| t, c, d |) = mk_bind g pre e1 e2 c1 c2lifted px d_e1 d_c1res d_e2 res_typing post_typing in
(| t, c, d |)
end
else fail g None "Cannot compose stt with ghost with non-emp opened invariants"
| C_STAtomic inames _, C_STAtomic _ _ ->
if eq_tm inames tm_emp_inames
then begin
let c1lifted = C_ST (st_comp_of_comp c1) in
let d_e1 : st_typing g e1 c1lifted =
T_Lift _ _ _ c1lifted d_e1 (Lift_STAtomic_ST _ c1) in
mk_bind g pre e1 e2 c1lifted c2 px d_e1 d_c1res d_e2 res_typing post_typing
end
else fail g None "Cannot compose statomics with non-emp opened invariants"
| _, _ -> fail g None "bind either not implemented (e.g. ghost) or not possible"
#pop-options
let bind_res_and_post_typing (g:env) (s2:st_comp) (x:var { fresh_wrt x g (freevars s2.post) })
(post_hint:post_hint_opt g { comp_post_matches_hint (C_ST s2) post_hint })
: T.Tac (universe_of g s2.res s2.u & | false | false | Pulse.Typing.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val bind_res_and_post_typing (g:env) (s2:st_comp) (x:var { fresh_wrt x g (freevars s2.post) })
(post_hint:post_hint_opt g { comp_post_matches_hint (C_ST s2) post_hint })
: T.Tac (universe_of g s2.res s2.u &
tot_typing (push_binding g x ppname_default s2.res) (open_term_nv s2.post (v_as_nv x)) tm_vprop) | [] | Pulse.Typing.Combinators.bind_res_and_post_typing | {
"file_name": "lib/steel/pulse/Pulse.Typing.Combinators.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
g: Pulse.Typing.Env.env ->
s2: Pulse.Syntax.Base.st_comp ->
x:
Pulse.Syntax.Base.var
{Pulse.Typing.fresh_wrt x g (Pulse.Syntax.Naming.freevars (Mkst_comp?.post s2))} ->
post_hint:
Pulse.Typing.post_hint_opt g
{Pulse.Typing.comp_post_matches_hint (Pulse.Syntax.Base.C_ST s2) post_hint}
-> FStar.Tactics.Effect.Tac
(Pulse.Typing.universe_of g (Mkst_comp?.res s2) (Mkst_comp?.u s2) *
Pulse.Typing.tot_typing (Pulse.Typing.Env.push_binding g
x
Pulse.Syntax.Base.ppname_default
(Mkst_comp?.res s2))
(Pulse.Syntax.Naming.open_term_nv (Mkst_comp?.post s2) (Pulse.Syntax.Base.v_as_nv x))
Pulse.Syntax.Base.tm_vprop) | {
"end_col": 7,
"end_line": 232,
"start_col": 4,
"start_line": 210
} |
FStar.Tactics.Effect.TacH | val mk_bind (g:env)
(pre:term)
(e1:st_term)
(e2:st_term)
(c1:comp_st)
(c2:comp_st)
(px:nvar { ~ (Set.mem (snd px) (dom g)) })
(d_e1:st_typing g e1 c1)
(d_c1res:tot_typing g (comp_res c1) (tm_type (comp_u c1)))
(d_e2:st_typing (push_binding g (snd px) (fst px) (comp_res c1)) (open_st_term_nv e2 px) c2)
(res_typing:universe_of g (comp_res c2) (comp_u c2))
(post_typing:tot_typing (push_binding g (snd px) (fst px) (comp_res c2))
(open_term_nv (comp_post c2) px)
tm_vprop)
: T.TacH (t:st_term &
c:comp_st { st_comp_of_comp c == st_comp_with_pre (st_comp_of_comp c2) pre } &
st_typing g t c)
(requires fun _ ->
let _, x = px in
comp_pre c1 == pre /\
None? (lookup g x) /\
(~(x `Set.mem` freevars_st e2)) /\
open_term (comp_post c1) x == comp_pre c2 /\
(~ (x `Set.mem` freevars (comp_post c2))))
(ensures fun _ _ -> True) | [
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Syntax.Printer",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec mk_bind (g:env)
(pre:term)
(e1:st_term)
(e2:st_term)
(c1:comp_st)
(c2:comp_st)
(px:nvar { ~ (Set.mem (snd px) (dom g)) })
(d_e1:st_typing g e1 c1)
(d_c1res:tot_typing g (comp_res c1) (tm_type (comp_u c1)))
(d_e2:st_typing (push_binding g (snd px) (fst px) (comp_res c1)) (open_st_term_nv e2 px) c2)
(res_typing:universe_of g (comp_res c2) (comp_u c2))
(post_typing:tot_typing (push_binding g (snd px) (fst px) (comp_res c2))
(open_term_nv (comp_post c2) px)
tm_vprop)
: T.TacH (t:st_term &
c:comp_st { st_comp_of_comp c == st_comp_with_pre (st_comp_of_comp c2) pre } &
st_typing g t c)
(requires fun _ ->
let _, x = px in
comp_pre c1 == pre /\
None? (lookup g x) /\
(~(x `Set.mem` freevars_st e2)) /\
open_term (comp_post c1) x == comp_pre c2 /\
(~ (x `Set.mem` freevars (comp_post c2))))
(ensures fun _ _ -> True) =
let _, x = px in
let b = nvar_as_binder px (comp_res c1) in
match c1, c2 with
| C_ST _, C_ST _ ->
let bc = Bind_comp g x c1 c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
| C_STGhost inames1 _, C_STGhost inames2 _ ->
if eq_tm inames1 inames2
then begin
let bc = Bind_comp g x c1 c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose two stghost computations with different opened invariants"
| C_STAtomic inames _, C_ST _ ->
if eq_tm inames tm_emp_inames
then begin
let c1lifted = C_ST (st_comp_of_comp c1) in
let d_e1 : st_typing g e1 c1lifted =
T_Lift _ _ _ c1lifted d_e1 (Lift_STAtomic_ST _ c1) in
let bc = Bind_comp g x c1lifted c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose atomic with non-emp opened invariants with stt"
| C_STGhost inames1 _, C_STAtomic inames2 _ ->
if eq_tm inames1 inames2
then begin
let w = get_non_informative_witness g (comp_u c1) (comp_res c1) in
let bc = Bind_comp_ghost_l g x c1 c2 w res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose ghost and atomic with different opened invariants"
| C_STAtomic inames1 _, C_STGhost inames2 _ ->
if eq_tm inames1 inames2
then begin
let w = get_non_informative_witness g (comp_u c2) (comp_res c2) in
let bc = Bind_comp_ghost_r g x c1 c2 w res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose atomic and ghost with different opened invariants"
| C_ST _, C_STAtomic inames _ ->
if eq_tm inames tm_emp_inames
then begin
let c2lifted = C_ST (st_comp_of_comp c2) in
let g' = push_binding g x (fst px) (comp_res c1) in
let d_e2 : st_typing g' (open_st_term_nv e2 px) c2lifted =
T_Lift _ _ _ c2lifted d_e2 (Lift_STAtomic_ST _ c2) in
let bc = Bind_comp g x c1 c2lifted res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
end
else fail g None "Cannot compose stt with atomic with non-emp opened invariants"
| C_STGhost inames _, C_ST _ ->
if eq_tm inames tm_emp_inames
then begin
let w = get_non_informative_witness g (comp_u c1) (comp_res c1) in
let c1lifted = C_STAtomic inames (st_comp_of_comp c1) in
let d_e1 : st_typing g e1 c1lifted =
T_Lift _ _ _ c1lifted d_e1 (Lift_STGhost_STAtomic g c1 w) in
mk_bind g pre e1 e2 c1lifted c2 px d_e1 d_c1res d_e2 res_typing post_typing
end
else fail g None "Cannot compose ghost with stt with non-emp opened invariants"
| C_ST _, C_STGhost inames _ ->
if eq_tm inames tm_emp_inames
then begin
let g' = push_binding g x (fst px) (comp_res c1) in
let w = get_non_informative_witness g' (comp_u c2) (comp_res c2) in
let c2lifted = C_STAtomic inames (st_comp_of_comp c2) in
let d_e2 : st_typing g' (open_st_term_nv e2 px) c2lifted =
T_Lift _ _ _ c2lifted d_e2 (Lift_STGhost_STAtomic g' c2 w) in
let (| t, c, d |) = mk_bind g pre e1 e2 c1 c2lifted px d_e1 d_c1res d_e2 res_typing post_typing in
(| t, c, d |)
end
else fail g None "Cannot compose stt with ghost with non-emp opened invariants"
| C_STAtomic inames _, C_STAtomic _ _ ->
if eq_tm inames tm_emp_inames
then begin
let c1lifted = C_ST (st_comp_of_comp c1) in
let d_e1 : st_typing g e1 c1lifted =
T_Lift _ _ _ c1lifted d_e1 (Lift_STAtomic_ST _ c1) in
mk_bind g pre e1 e2 c1lifted c2 px d_e1 d_c1res d_e2 res_typing post_typing
end
else fail g None "Cannot compose statomics with non-emp opened invariants"
| _, _ -> fail g None "bind either not implemented (e.g. ghost) or not possible" | val mk_bind (g:env)
(pre:term)
(e1:st_term)
(e2:st_term)
(c1:comp_st)
(c2:comp_st)
(px:nvar { ~ (Set.mem (snd px) (dom g)) })
(d_e1:st_typing g e1 c1)
(d_c1res:tot_typing g (comp_res c1) (tm_type (comp_u c1)))
(d_e2:st_typing (push_binding g (snd px) (fst px) (comp_res c1)) (open_st_term_nv e2 px) c2)
(res_typing:universe_of g (comp_res c2) (comp_u c2))
(post_typing:tot_typing (push_binding g (snd px) (fst px) (comp_res c2))
(open_term_nv (comp_post c2) px)
tm_vprop)
: T.TacH (t:st_term &
c:comp_st { st_comp_of_comp c == st_comp_with_pre (st_comp_of_comp c2) pre } &
st_typing g t c)
(requires fun _ ->
let _, x = px in
comp_pre c1 == pre /\
None? (lookup g x) /\
(~(x `Set.mem` freevars_st e2)) /\
open_term (comp_post c1) x == comp_pre c2 /\
(~ (x `Set.mem` freevars (comp_post c2))))
(ensures fun _ _ -> True)
let rec mk_bind
(g: env)
(pre: term)
(e1 e2: st_term)
(c1 c2: comp_st)
(px: nvar{~(Set.mem (snd px) (dom g))})
(d_e1: st_typing g e1 c1)
(d_c1res: tot_typing g (comp_res c1) (tm_type (comp_u c1)))
(d_e2: st_typing (push_binding g (snd px) (fst px) (comp_res c1)) (open_st_term_nv e2 px) c2)
(res_typing: universe_of g (comp_res c2) (comp_u c2))
(post_typing:
tot_typing (push_binding g (snd px) (fst px) (comp_res c2))
(open_term_nv (comp_post c2) px)
tm_vprop)
: T.TacH
(t: st_term &
c: comp_st{st_comp_of_comp c == st_comp_with_pre (st_comp_of_comp c2) pre} &
st_typing g t c)
(requires
fun _ ->
let _, x = px in
comp_pre c1 == pre /\ None? (lookup g x) /\ (~(x `Set.mem` (freevars_st e2))) /\
open_term (comp_post c1) x == comp_pre c2 /\ (~(x `Set.mem` (freevars (comp_post c2)))))
(ensures fun _ _ -> True) = | true | null | false | let _, x = px in
let b = nvar_as_binder px (comp_res c1) in
match c1, c2 with
| C_ST _, C_ST _ ->
let bc = Bind_comp g x c1 c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
| C_STGhost inames1 _, C_STGhost inames2 _ ->
if eq_tm inames1 inames2
then
let bc = Bind_comp g x c1 c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
else fail g None "Cannot compose two stghost computations with different opened invariants"
| C_STAtomic inames _, C_ST _ ->
if eq_tm inames tm_emp_inames
then
let c1lifted = C_ST (st_comp_of_comp c1) in
let d_e1:st_typing g e1 c1lifted = T_Lift _ _ _ c1lifted d_e1 (Lift_STAtomic_ST _ c1) in
let bc = Bind_comp g x c1lifted c2 res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
else fail g None "Cannot compose atomic with non-emp opened invariants with stt"
| C_STGhost inames1 _, C_STAtomic inames2 _ ->
if eq_tm inames1 inames2
then
let w = get_non_informative_witness g (comp_u c1) (comp_res c1) in
let bc = Bind_comp_ghost_l g x c1 c2 w res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
else fail g None "Cannot compose ghost and atomic with different opened invariants"
| C_STAtomic inames1 _, C_STGhost inames2 _ ->
if eq_tm inames1 inames2
then
let w = get_non_informative_witness g (comp_u c2) (comp_res c2) in
let bc = Bind_comp_ghost_r g x c1 c2 w res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
else fail g None "Cannot compose atomic and ghost with different opened invariants"
| C_ST _, C_STAtomic inames _ ->
if eq_tm inames tm_emp_inames
then
let c2lifted = C_ST (st_comp_of_comp c2) in
let g' = push_binding g x (fst px) (comp_res c1) in
let d_e2:st_typing g' (open_st_term_nv e2 px) c2lifted =
T_Lift _ _ _ c2lifted d_e2 (Lift_STAtomic_ST _ c2)
in
let bc = Bind_comp g x c1 c2lifted res_typing x post_typing in
(| _, _, T_Bind _ e1 e2 _ _ b _ _ d_e1 d_c1res d_e2 bc |)
else fail g None "Cannot compose stt with atomic with non-emp opened invariants"
| C_STGhost inames _, C_ST _ ->
if eq_tm inames tm_emp_inames
then
let w = get_non_informative_witness g (comp_u c1) (comp_res c1) in
let c1lifted = C_STAtomic inames (st_comp_of_comp c1) in
let d_e1:st_typing g e1 c1lifted = T_Lift _ _ _ c1lifted d_e1 (Lift_STGhost_STAtomic g c1 w) in
mk_bind g pre e1 e2 c1lifted c2 px d_e1 d_c1res d_e2 res_typing post_typing
else fail g None "Cannot compose ghost with stt with non-emp opened invariants"
| C_ST _, C_STGhost inames _ ->
if eq_tm inames tm_emp_inames
then
let g' = push_binding g x (fst px) (comp_res c1) in
let w = get_non_informative_witness g' (comp_u c2) (comp_res c2) in
let c2lifted = C_STAtomic inames (st_comp_of_comp c2) in
let d_e2:st_typing g' (open_st_term_nv e2 px) c2lifted =
T_Lift _ _ _ c2lifted d_e2 (Lift_STGhost_STAtomic g' c2 w)
in
let (| t , c , d |) =
mk_bind g pre e1 e2 c1 c2lifted px d_e1 d_c1res d_e2 res_typing post_typing
in
(| t, c, d |)
else fail g None "Cannot compose stt with ghost with non-emp opened invariants"
| C_STAtomic inames _, C_STAtomic _ _ ->
if eq_tm inames tm_emp_inames
then
let c1lifted = C_ST (st_comp_of_comp c1) in
let d_e1:st_typing g e1 c1lifted = T_Lift _ _ _ c1lifted d_e1 (Lift_STAtomic_ST _ c1) in
mk_bind g pre e1 e2 c1lifted c2 px d_e1 d_c1res d_e2 res_typing post_typing
else fail g None "Cannot compose statomics with non-emp opened invariants"
| _, _ -> fail g None "bind either not implemented (e.g. ghost) or not possible" | {
"checked_file": "Pulse.Typing.Combinators.fst.checked",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.Printer.fsti.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": true,
"source_file": "Pulse.Typing.Combinators.fst"
} | [] | [
"Pulse.Typing.Env.env",
"Pulse.Syntax.Base.term",
"Pulse.Syntax.Base.st_term",
"Pulse.Syntax.Base.comp_st",
"Pulse.Syntax.Base.nvar",
"Prims.l_not",
"Prims.b2t",
"FStar.Set.mem",
"Pulse.Syntax.Base.var",
"FStar.Pervasives.Native.snd",
"Pulse.Syntax.Base.ppname",
"Pulse.Typing.Env.dom",
"Pulse.Typing.st_typing",
"Pulse.Typing.tot_typing",
"Pulse.Syntax.Base.comp_res",
"Pulse.Syntax.Pure.tm_type",
"Pulse.Syntax.Base.comp_u",
"Pulse.Typing.Env.push_binding",
"FStar.Pervasives.Native.fst",
"Pulse.Syntax.Naming.open_st_term_nv",
"Pulse.Typing.universe_of",
"Pulse.Syntax.Naming.open_term_nv",
"Pulse.Syntax.Base.comp_post",
"Pulse.Syntax.Base.tm_vprop",
"FStar.Pervasives.Native.Mktuple2",
"Pulse.Syntax.Base.comp",
"Pulse.Syntax.Base.st_comp",
"FStar.Pervasives.Mkdtuple3",
"Prims.eq2",
"Pulse.Syntax.Base.st_comp_of_comp",
"Pulse.Typing.Combinators.st_comp_with_pre",
"Pulse.Typing.wr",
"Pulse.Typing.bind_comp_out",
"Pulse.Syntax.Base.Tm_Bind",
"Pulse.Syntax.Base.Mkst_term'__Tm_Bind__payload",
"Pulse.Typing.T_Bind",
"Pulse.Typing.bind_comp",
"Pulse.Typing.Bind_comp",
"FStar.Pervasives.dtuple3",
"Pulse.Syntax.Base.eq_tm",
"Prims.bool",
"Pulse.Typing.Env.fail",
"FStar.Pervasives.Native.None",
"Pulse.Syntax.Base.range",
"Pulse.Syntax.Base.tm_emp_inames",
"Pulse.Typing.T_Lift",
"Pulse.Typing.Lift_STAtomic_ST",
"Pulse.Syntax.Base.C_ST",
"Pulse.Typing.bind_comp_ghost_l_out",
"Pulse.Typing.Bind_comp_ghost_l",
"Pulse.Typing.non_informative_t",
"Pulse.Checker.Pure.get_non_informative_witness",
"Pulse.Typing.bind_comp_ghost_r_out",
"Pulse.Typing.Bind_comp_ghost_r",
"FStar.Reflection.Typing.fstar_top_env",
"Pulse.Typing.Env.fstar_env",
"Pulse.Typing.Combinators.mk_bind",
"Pulse.Typing.Lift_STGhost_STAtomic",
"Pulse.Syntax.Base.C_STAtomic",
"Pulse.Syntax.Base.binder",
"Pulse.Typing.Combinators.nvar_as_binder",
"FStar.Tactics.Types.proofstate",
"Prims.l_and",
"Pulse.Syntax.Base.comp_pre",
"FStar.Pervasives.Native.uu___is_None",
"Pulse.Syntax.Base.typ",
"Pulse.Typing.Env.lookup",
"Pulse.Syntax.Naming.freevars_st",
"Pulse.Syntax.Naming.open_term",
"Pulse.Syntax.Naming.freevars",
"FStar.Tactics.Result.__result",
"Prims.l_True"
] | [] | module Pulse.Typing.Combinators
module RT = FStar.Reflection.Typing
module R = FStar.Reflection.V2
module L = FStar.List.Tot
module T = FStar.Tactics.V2
module P = Pulse.Syntax.Printer
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing
open Pulse.Checker.Pure
let rec vprop_equiv_typing (#g:_) (#t0 #t1:term) (v:vprop_equiv g t0 t1)
: GTot ((tot_typing g t0 tm_vprop -> tot_typing g t1 tm_vprop) &
(tot_typing g t1 tm_vprop -> tot_typing g t0 tm_vprop))
(decreases v)
= match v with
| VE_Refl _ _ -> (fun x -> x), (fun x -> x)
| VE_Sym _ _ _ v' ->
let f, g = vprop_equiv_typing v' in
g, f
| VE_Trans g t0 t2 t1 v02 v21 ->
let f02, f20 = vprop_equiv_typing v02 in
let f21, f12 = vprop_equiv_typing v21 in
(fun x -> f21 (f02 x)),
(fun x -> f20 (f12 x))
| VE_Ctxt g s0 s1 s0' s1' v0 v1 ->
let f0, f0' = vprop_equiv_typing v0 in
let f1, f1' = vprop_equiv_typing v1 in
let ff (x:tot_typing g (tm_star s0 s1) tm_vprop)
: tot_typing g (tm_star s0' s1') tm_vprop
= let s0_typing = star_typing_inversion_l x in
let s1_typing = star_typing_inversion_r x in
let s0'_typing, s1'_typing = f0 s0_typing, f1 s1_typing in
star_typing s0'_typing s1'_typing
in
let gg (x:tot_typing g (tm_star s0' s1') tm_vprop)
: tot_typing g (tm_star s0 s1) tm_vprop
= let s0'_typing = star_typing_inversion_l x in
let s1'_typing = star_typing_inversion_r x in
star_typing (f0' s0'_typing) (f1' s1'_typing)
in
ff, gg
| VE_Unit g t ->
let fwd (x:tot_typing g (tm_star tm_emp t) tm_vprop)
: tot_typing g t tm_vprop
= let r = star_typing_inversion_r x in
r
in
let bk (x:tot_typing g t tm_vprop)
: tot_typing g (tm_star tm_emp t) tm_vprop
= star_typing emp_typing x
in
fwd, bk
| VE_Comm g t0 t1 ->
let f t0 t1 (x:tot_typing g (tm_star t0 t1) tm_vprop)
: tot_typing g (tm_star t1 t0) tm_vprop
= let tt0 = star_typing_inversion_l x in
let tt1 = star_typing_inversion_r x in
star_typing tt1 tt0
in
f t0 t1, f t1 t0
| VE_Assoc g t0 t1 t2 ->
let fwd (x:tot_typing g (tm_star t0 (tm_star t1 t2)) tm_vprop)
: tot_typing g (tm_star (tm_star t0 t1) t2) tm_vprop
= let tt0 = star_typing_inversion_l x in
let tt12 = star_typing_inversion_r x in
let tt1 = star_typing_inversion_l tt12 in
let tt2 = star_typing_inversion_r tt12 in
star_typing (star_typing tt0 tt1) tt2
in
let bk (x : tot_typing g (tm_star (tm_star t0 t1) t2) tm_vprop)
: tot_typing g (tm_star t0 (tm_star t1 t2)) tm_vprop
= let tt01 = star_typing_inversion_l x in
let tt2 = star_typing_inversion_r x in
let tt0 = star_typing_inversion_l tt01 in
let tt1 = star_typing_inversion_r tt01 in
star_typing tt0 (star_typing tt1 tt2)
in
fwd, bk
| VE_Ext g t0 t1 token ->
let d1, d2 = vprop_eq_typing_inversion g t0 t1 token in
(fun _ -> d2),
(fun _ -> d1)
#push-options "--z3rlimit_factor 8 --ifuel 1 --fuel 2 --query_stats"
let rec mk_bind (g:env)
(pre:term)
(e1:st_term)
(e2:st_term)
(c1:comp_st)
(c2:comp_st)
(px:nvar { ~ (Set.mem (snd px) (dom g)) })
(d_e1:st_typing g e1 c1)
(d_c1res:tot_typing g (comp_res c1) (tm_type (comp_u c1)))
(d_e2:st_typing (push_binding g (snd px) (fst px) (comp_res c1)) (open_st_term_nv e2 px) c2)
(res_typing:universe_of g (comp_res c2) (comp_u c2))
(post_typing:tot_typing (push_binding g (snd px) (fst px) (comp_res c2))
(open_term_nv (comp_post c2) px)
tm_vprop)
: T.TacH (t:st_term &
c:comp_st { st_comp_of_comp c == st_comp_with_pre (st_comp_of_comp c2) pre } &
st_typing g t c)
(requires fun _ ->
let _, x = px in
comp_pre c1 == pre /\
None? (lookup g x) /\
(~(x `Set.mem` freevars_st e2)) /\
open_term (comp_post c1) x == comp_pre c2 /\ | false | false | Pulse.Typing.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 8,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_bind (g:env)
(pre:term)
(e1:st_term)
(e2:st_term)
(c1:comp_st)
(c2:comp_st)
(px:nvar { ~ (Set.mem (snd px) (dom g)) })
(d_e1:st_typing g e1 c1)
(d_c1res:tot_typing g (comp_res c1) (tm_type (comp_u c1)))
(d_e2:st_typing (push_binding g (snd px) (fst px) (comp_res c1)) (open_st_term_nv e2 px) c2)
(res_typing:universe_of g (comp_res c2) (comp_u c2))
(post_typing:tot_typing (push_binding g (snd px) (fst px) (comp_res c2))
(open_term_nv (comp_post c2) px)
tm_vprop)
: T.TacH (t:st_term &
c:comp_st { st_comp_of_comp c == st_comp_with_pre (st_comp_of_comp c2) pre } &
st_typing g t c)
(requires fun _ ->
let _, x = px in
comp_pre c1 == pre /\
None? (lookup g x) /\
(~(x `Set.mem` freevars_st e2)) /\
open_term (comp_post c1) x == comp_pre c2 /\
(~ (x `Set.mem` freevars (comp_post c2))))
(ensures fun _ _ -> True) | [
"recursion"
] | Pulse.Typing.Combinators.mk_bind | {
"file_name": "lib/steel/pulse/Pulse.Typing.Combinators.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
g: Pulse.Typing.Env.env ->
pre: Pulse.Syntax.Base.term ->
e1: Pulse.Syntax.Base.st_term ->
e2: Pulse.Syntax.Base.st_term ->
c1: Pulse.Syntax.Base.comp_st ->
c2: Pulse.Syntax.Base.comp_st ->
px:
Pulse.Syntax.Base.nvar
{~(FStar.Set.mem (FStar.Pervasives.Native.snd px) (Pulse.Typing.Env.dom g))} ->
d_e1: Pulse.Typing.st_typing g e1 c1 ->
d_c1res:
Pulse.Typing.tot_typing g
(Pulse.Syntax.Base.comp_res c1)
(Pulse.Syntax.Pure.tm_type (Pulse.Syntax.Base.comp_u c1)) ->
d_e2:
Pulse.Typing.st_typing (Pulse.Typing.Env.push_binding g
(FStar.Pervasives.Native.snd px)
(FStar.Pervasives.Native.fst px)
(Pulse.Syntax.Base.comp_res c1))
(Pulse.Syntax.Naming.open_st_term_nv e2 px)
c2 ->
res_typing:
Pulse.Typing.universe_of g (Pulse.Syntax.Base.comp_res c2) (Pulse.Syntax.Base.comp_u c2) ->
post_typing:
Pulse.Typing.tot_typing (Pulse.Typing.Env.push_binding g
(FStar.Pervasives.Native.snd px)
(FStar.Pervasives.Native.fst px)
(Pulse.Syntax.Base.comp_res c2))
(Pulse.Syntax.Naming.open_term_nv (Pulse.Syntax.Base.comp_post c2) px)
Pulse.Syntax.Base.tm_vprop
-> FStar.Tactics.Effect.TacH
(FStar.Pervasives.dtuple3 Pulse.Syntax.Base.st_term
(fun _ ->
c:
Pulse.Syntax.Base.comp_st
{ Pulse.Syntax.Base.st_comp_of_comp c ==
Pulse.Typing.Combinators.st_comp_with_pre (Pulse.Syntax.Base.st_comp_of_comp c2) pre
})
(fun t c -> Pulse.Typing.st_typing g t c)) | {
"end_col": 82,
"end_line": 202,
"start_col": 38,
"start_line": 120
} |
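
A minimal sketch, not taken from the Pulse sources: mk_bind returns a dependent triple packaging the bound term, its computation type, and the typing derivation, so callers open the result with a dtuple3 pattern. The helper below (third and p are hypothetical names) only illustrates that destructuring in plain F*.

(* Hypothetical helper: destructuring a dtuple3, the shape of mk_bind's result. *)
let third (p : (x:int & y:int & int)) : int =
  let (| _, _, z |) = p in
  z
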
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let codes = va_codes | let codes = | false | null | false | va_codes | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_codes"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val codes : Type0 | [] | Vale.PPC64LE.QuickCodes.codes | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 27,
"end_line": 15,
"start_col": 19,
"start_line": 15
} |
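
A minimal sketch, assuming the module's usual opens (e.g. Vale.PPC64LE.Decls) are in scope: since codes is just va_codes, i.e. a list of va_code values, ordinary list syntax builds one (two_empty_blocks is a hypothetical name).

(* Two empty blocks, packaged as a codes value. *)
let two_empty_blocks : codes = [va_Block []; va_Block []]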
|
Prims.GTot | val precedes_wrap (#a: Type) (x y: a) : GTot Type0 | [
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y | val precedes_wrap (#a: Type) (x y: a) : GTot Type0
let precedes_wrap (#a: Type) (x y: a) : GTot Type0 = | false | null | false | precedes x y | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"sometrivial"
] | [
"Prims.precedes"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)] | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val precedes_wrap (#a: Type) (x y: a) : GTot Type0 | [] | Vale.PPC64LE.QuickCodes.precedes_wrap | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: a -> y: a -> Prims.GTot Type0 | {
"end_col": 63,
"end_line": 35,
"start_col": 51,
"start_line": 35
} |
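
precedes_wrap is definitionally the built-in precedes relation (<<), the well-founded ordering that decreases clauses are measured against; per the source comment, the wrapper exists only to avoid issues when precedes appears under label in SMT queries. A small sketch of that ordering in action (standard F*, not from this module): the tail of a list precedes the list, so the recursion below is accepted.

(* Structural recursion justified by the << ordering on list values. *)
let rec length' (#a:Type) (l:list a) : Tot nat (decreases l) =
  match l with
  | [] -> 0
  | _ :: tl -> 1 + length' tl
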
Prims.Tot | val if_code (b: bool) (c1 c2: code) : code | [
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2 | val if_code (b: bool) (c1 c2: code) : code
let if_code (b: bool) (c1 c2: code) : code = | false | null | false | if b then c1 else c2 | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.PPC64LE.QuickCodes.code"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val if_code (b: bool) (c1 c2: code) : code | [] | Vale.PPC64LE.QuickCodes.if_code | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b: Prims.bool -> c1: Vale.PPC64LE.QuickCodes.code -> c2: Vale.PPC64LE.QuickCodes.code
-> Vale.PPC64LE.QuickCodes.code | {
"end_col": 70,
"end_line": 50,
"start_col": 50,
"start_line": 50
} |
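
A minimal sketch, assuming the module's opens are in scope: if_code is an ordinary conditional on code values, so the selected branch follows by unfolding its definition (skip and if_code_true are hypothetical names used only for illustration).

(* A no-op code value, and a selection fact provable by unfolding if_code. *)
let skip : code = va_Block []
let if_code_true (c:code) : Lemma (if_code true c skip == c) = ()
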
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let block = va_Block | let block = | false | null | false | va_Block | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_Block"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val block : block: Vale.PPC64LE.Decls.va_codes -> Vale.PPC64LE.Decls.va_code | [] | Vale.PPC64LE.QuickCodes.block | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | block: Vale.PPC64LE.Decls.va_codes -> Vale.PPC64LE.Decls.va_code | {
"end_col": 27,
"end_line": 168,
"start_col": 19,
"start_line": 168
} |
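
A minimal sketch, assuming the module's opens are in scope: block turns a list of codes back into a single code, so structured code values nest directly (nested is a hypothetical name).

(* A block containing c1 followed by an inner block around c2. *)
let nested (c1 c2:code) : code = block [c1; block [c2]]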
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let wp_Bind_t (a:Type0) = va_state -> a -> Type0 | let wp_Bind_t (a: Type0) = | false | null | false | va_state -> a -> Type0 | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val wp_Bind_t : a: Type0 -> Type | [] | Vale.PPC64LE.QuickCodes.wp_Bind_t | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: Type0 -> Type | {
"end_col": 48,
"end_line": 100,
"start_col": 26,
"start_line": 100
} |
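
A minimal sketch: a wp_Bind_t value (like a wp_Seq_t) is just a postcondition over the final machine state and the returned ghost value; the degenerate continuation below accepts every outcome (k_trivial is a hypothetical name).

(* The trivial continuation of type wp_Bind_t a. *)
let k_trivial (a:Type0) : wp_Bind_t a = fun (_:va_state) (_:a) -> True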
|
Prims.Tot | val wp_If
(#a: Type)
(#c1 #c2: code)
(b: cmp)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
(mods: mods_t)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k)) | val wp_If
(#a: Type)
(#c1 #c2: code)
(b: cmp)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
(mods: mods_t)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0
let wp_If
(#a: Type)
(#c1 #c2: code)
(b: cmp)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
(mods: mods_t)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0 = | false | null | false | valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
(eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k)) | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.code",
"Vale.PPC64LE.QuickCodes.cmp",
"Vale.PPC64LE.QuickCode.quickCode",
"Vale.PPC64LE.QuickCode.mods_t",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_and",
"Vale.PPC64LE.QuickCodes.valid_cmp",
"Prims.b2t",
"Vale.PPC64LE.QuickCodes.mods_contains1",
"Vale.PPC64LE.QuickCode.Mod_cr0",
"Prims.l_imp",
"Vale.PPC64LE.QuickCodes.eval_cmp",
"Vale.PPC64LE.QuickCodes.mods_contains",
"Vale.PPC64LE.QuickCode.__proj__QProc__item__mods",
"Vale.PPC64LE.QuickCode.__proj__QProc__item__wp",
"Prims.op_Negation",
"Vale.PPC64LE.Machine_s.state",
"Vale.PPC64LE.Decls.va_upd_cr0",
"Vale.PPC64LE.Decls.eval_cmp_cr0",
"Vale.PPC64LE.QuickCodes.cmp_to_ocmp"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 = | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val wp_If
(#a: Type)
(#c1 #c2: code)
(b: cmp)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
(mods: mods_t)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0 | [] | Vale.PPC64LE.QuickCodes.wp_If | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b: Vale.PPC64LE.QuickCodes.cmp ->
qc1: Vale.PPC64LE.QuickCode.quickCode a c1 ->
qc2: Vale.PPC64LE.QuickCode.quickCode a c2 ->
mods: Vale.PPC64LE.QuickCode.mods_t ->
s0: Vale.PPC64LE.Decls.va_state ->
k: (_: Vale.PPC64LE.Decls.va_state -> _: a -> Type0)
-> Type0 | {
"end_col": 80,
"end_line": 248,
"start_col": 2,
"start_line": 245
} |
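
A minimal sketch, assuming the module's opens are in scope and leaving the operands abstract: wp_If requires the comparison to be well formed (valid_cmp) and cr0 to be among the allowed mods, then checks the wp of the branch actually taken in the state whose cr0 reflects the comparison; the cmp values it branches on translate to machine-level ocmp values as below (eq_cmp and eq_ocmp are hypothetical names).

(* Building the comparison that wp_If evaluates against the initial state. *)
let eq_cmp (o1 o2:cmp_opr) : cmp = Cmp_eq o1 o2
let eq_ocmp (o1 o2:cmp_opr) : ocmp = cmp_to_ocmp (Cmp_eq o1 o2)
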
Prims.Tot | val wp_While_body
(#a #d: Type)
(#c: code)
(b: cmp)
(qc: (a -> quickCode a c))
(mods: mods_t)
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(g1: a)
(s1: va_state)
(k: (va_state -> a -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1)) | val wp_While_body
(#a #d: Type)
(#c: code)
(b: cmp)
(qc: (a -> quickCode a c))
(mods: mods_t)
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(g1: a)
(s1: va_state)
(k: (va_state -> a -> Type0))
: Type0
let wp_While_body
(#a #d: Type)
(#c: code)
(b: cmp)
(qc: (a -> quickCode a c))
(mods: mods_t)
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(g1: a)
(s1: va_state)
(k: (va_state -> a -> Type0))
: Type0 = | false | null | false | valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
(eval_cmp s1 b ==>
mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1)) | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.code",
"Vale.PPC64LE.QuickCodes.cmp",
"Vale.PPC64LE.QuickCode.quickCode",
"Vale.PPC64LE.QuickCode.mods_t",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_and",
"Vale.PPC64LE.QuickCodes.valid_cmp",
"Prims.l_imp",
"Prims.b2t",
"Vale.PPC64LE.QuickCodes.eval_cmp",
"Vale.PPC64LE.QuickCodes.mods_contains",
"Vale.PPC64LE.QuickCode.__proj__QProc__item__mods",
"Vale.PPC64LE.QuickCode.__proj__QProc__item__wp",
"Vale.PPC64LE.QuickCodes.wp_While_inv",
"Prims.op_Negation",
"Vale.PPC64LE.Machine_s.state",
"Vale.PPC64LE.Decls.va_upd_cr0",
"Vale.PPC64LE.Decls.eval_cmp_cr0",
"Vale.PPC64LE.QuickCodes.cmp_to_ocmp"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0) | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val wp_While_body
(#a #d: Type)
(#c: code)
(b: cmp)
(qc: (a -> quickCode a c))
(mods: mods_t)
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(g1: a)
(s1: va_state)
(k: (va_state -> a -> Type0))
: Type0 | [] | Vale.PPC64LE.QuickCodes.wp_While_body | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b: Vale.PPC64LE.QuickCodes.cmp ->
qc: (_: a -> Vale.PPC64LE.QuickCode.quickCode a c) ->
mods: Vale.PPC64LE.QuickCode.mods_t ->
inv: (_: Vale.PPC64LE.Decls.va_state -> _: a -> Type0) ->
dec: (_: Vale.PPC64LE.Decls.va_state -> _: a -> d) ->
g1: a ->
s1: Vale.PPC64LE.Decls.va_state ->
k: (_: Vale.PPC64LE.Decls.va_state -> _: a -> Type0)
-> Type0 | {
"end_col": 39,
"end_line": 278,
"start_col": 2,
"start_line": 275
} |
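
The record above captures wp_While_body, the weakest precondition of a single iteration of a Vale/PPC64LE while loop: when the guard b holds in s1, the body qc g1 must satisfy wp_While_inv, i.e. re-establish the invariant inv and strictly decrease the measure dec; when the guard fails, the continuation k receives the state whose cr0 field reflects the comparison. The following lemma sketch spells out that exit case. It is illustrative only and has not been machine-checked here; it assumes the opens listed in this record (Vale.PPC64LE.Decls, Vale.PPC64LE.QuickCode, Vale.PPC64LE.QuickCodes, ...) and that the SMT encoding may unfold wp_While_body.

  let wp_While_body_exit
      (#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t)
      (inv:va_state -> a -> Type0) (dec:va_state -> a -> d)
      (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
    : Lemma
        (requires wp_While_body b qc mods inv dec g1 s1 k /\ not (eval_cmp s1 b))
        (ensures  k (va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1) g1)
    = // The guard is false, so after unfolding wp_While_body only the
      // (not (eval_cmp s1 b) ==> k s1' g1) conjunct is relevant, which is
      // exactly the ensures with s1' written out.
      ()
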
Prims.Tot | val va_QEmpty (#a: Type0) (v: a) : quickCodes a [] | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v | val va_QEmpty (#a: Type0) (v: a) : quickCodes a []
let va_QEmpty (#a: Type0) (v: a) : quickCodes a [] = | false | null | false | QEmpty v | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.QEmpty",
"Vale.PPC64LE.QuickCodes.quickCodes",
"Prims.Nil",
"Vale.PPC64LE.Machine_s.precode",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.ocmp"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_QEmpty (#a: Type0) (v: a) : quickCodes a [] | [] | Vale.PPC64LE.QuickCodes.va_QEmpty | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | v: a -> Vale.PPC64LE.QuickCodes.quickCodes a [] | {
"end_col": 78,
"end_line": 73,
"start_col": 70,
"start_line": 73
} |
Prims.Tot | val wp_InlineIf
(#a: Type)
(#c1 #c2: code)
(b: bool)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
(mods: mods_t)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k) | val wp_InlineIf
(#a: Type)
(#c1 #c2: code)
(b: bool)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
(mods: mods_t)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0
let wp_InlineIf
(#a: Type)
(#c1 #c2: code)
(b: bool)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
(mods: mods_t)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0 = | false | null | false | (b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k) | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.code",
"Prims.bool",
"Vale.PPC64LE.QuickCode.quickCode",
"Vale.PPC64LE.QuickCode.mods_t",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_and",
"Prims.l_imp",
"Prims.b2t",
"Vale.PPC64LE.QuickCodes.mods_contains",
"Vale.PPC64LE.QuickCode.__proj__QProc__item__mods",
"Vale.PPC64LE.QuickCode.__proj__QProc__item__wp",
"Prims.op_Negation"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 = | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val wp_InlineIf
(#a: Type)
(#c1 #c2: code)
(b: bool)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
(mods: mods_t)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0 | [] | Vale.PPC64LE.QuickCodes.wp_InlineIf | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b: Prims.bool ->
qc1: Vale.PPC64LE.QuickCode.quickCode a c1 ->
qc2: Vale.PPC64LE.QuickCode.quickCode a c2 ->
mods: Vale.PPC64LE.QuickCode.mods_t ->
s0: Vale.PPC64LE.Decls.va_state ->
k: (_: Vale.PPC64LE.Decls.va_state -> _: a -> Type0)
-> Type0 | {
"end_col": 63,
"end_line": 191,
"start_col": 2,
"start_line": 190
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tAssumeLemma (p:Type0) = unit -> Lemma (requires True) (ensures p) | let tAssumeLemma (p: Type0) = | false | null | false | unit -> Lemma (requires True) (ensures p) | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k)
val qWhile_proof
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_While b qc mods inv dec g0 s0 k)
(ensures fun (sM, f0, g) ->
eval_code (While (cmp_to_ocmp b) c) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a)
: quickCode a (While (cmp_to_ocmp b) c) =
QProc (While (cmp_to_ocmp b) c) mods (wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0)
///// Assert, Assume, AssertBy
let tAssertLemma (p:Type0) = unit -> Lemma (requires p) (ensures p)
val qAssertLemma (p:Type0) : tAssertLemma p
[@va_qattr]
let va_qAssert (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg e (fun () -> e) (qAssertLemma e) qcs | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tAssumeLemma : p: Type0 -> Type0 | [] | Vale.PPC64LE.QuickCodes.tAssumeLemma | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Type0 -> Type0 | {
"end_col": 70,
"end_line": 315,
"start_col": 29,
"start_line": 315
} |
|
Prims.Tot | val va_QBind
(#a: Type0)
(#b: Type)
(#c: code)
(#cs: codes)
(r: range)
(msg: string)
(qc: quickCode b c)
(qcs: (va_state -> b -> GTot (quickCodes a cs)))
: quickCodes a (c :: cs) | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs | val va_QBind
(#a: Type0)
(#b: Type)
(#c: code)
(#cs: codes)
(r: range)
(msg: string)
(qc: quickCode b c)
(qcs: (va_state -> b -> GTot (quickCodes a cs)))
: quickCodes a (c :: cs)
let va_QBind
(#a: Type0)
(#b: Type)
(#c: code)
(#cs: codes)
(r: range)
(msg: string)
(qc: quickCode b c)
(qcs: (va_state -> b -> GTot (quickCodes a cs)))
: quickCodes a (c :: cs) = | false | null | false | QBind r msg qc qcs | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.code",
"Vale.PPC64LE.QuickCodes.codes",
"FStar.Range.range",
"Prims.string",
"Vale.PPC64LE.QuickCode.quickCode",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.QuickCodes.quickCodes",
"Vale.PPC64LE.QuickCodes.QBind",
"Prims.Cons",
"Vale.PPC64LE.Decls.va_code"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_QBind
(#a: Type0)
(#b: Type)
(#c: code)
(#cs: codes)
(r: range)
(msg: string)
(qc: quickCode b c)
(qcs: (va_state -> b -> GTot (quickCodes a cs)))
: quickCodes a (c :: cs) | [] | Vale.PPC64LE.QuickCodes.va_QBind | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
r: FStar.Range.range ->
msg: Prims.string ->
qc: Vale.PPC64LE.QuickCode.quickCode b c ->
qcs:
(_: Vale.PPC64LE.Decls.va_state -> _: b
-> Prims.GTot (Vale.PPC64LE.QuickCodes.quickCodes a cs))
-> Vale.PPC64LE.QuickCodes.quickCodes a (c :: cs) | {
"end_col": 206,
"end_line": 72,
"start_col": 188,
"start_line": 72
} |
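For orientation, the record above describes `va_QBind`, an unfolding wrapper around the `QBind` constructor: unlike `QSeq`, the continuation receives both the intermediate `va_state` and the head procedure's return value. A minimal, hypothetical usage sketch follows; `va_code_F ()` and `va_quick_F ()` are assumed stand-ins for some quick procedure returning a `nat64` and are not part of this record.

// Hypothetical sketch (not from this dataset row): the tail of the block
// depends on F's result, which va_QBind threads into the continuation.
let sketch_bind (r:range) : quickCodes nat64 [va_code_F ()] =
  va_QBind r "call F" (va_quick_F ())
    (fun (_:va_state) (x:nat64) -> va_QEmpty x)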
Prims.Tot | val wp_block
(#a: Type)
(#cs: codes)
(qcs: (va_state -> GTot (quickCodes a cs)))
(mods: mods_t)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0 | val wp_block
(#a: Type)
(#cs: codes)
(qcs: (va_state -> GTot (quickCodes a cs)))
(mods: mods_t)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0
let wp_block
(#a: Type)
(#cs: codes)
(qcs: (va_state -> GTot (quickCodes a cs)))
(mods: mods_t)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0 = | false | null | false | wp cs (qcs s0) mods k s0 | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.codes",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.QuickCodes.quickCodes",
"Vale.PPC64LE.QuickCode.mods_t",
"Vale.PPC64LE.QuickCodes.wp"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr] | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val wp_block
(#a: Type)
(#cs: codes)
(qcs: (va_state -> GTot (quickCodes a cs)))
(mods: mods_t)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0 | [] | Vale.PPC64LE.QuickCodes.wp_block | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
qcs: (_: Vale.PPC64LE.Decls.va_state -> Prims.GTot (Vale.PPC64LE.QuickCodes.quickCodes a cs)) ->
mods: Vale.PPC64LE.QuickCode.mods_t ->
s0: Vale.PPC64LE.Decls.va_state ->
k: (_: Vale.PPC64LE.Decls.va_state -> _: a -> Type0)
-> Type0 | {
"end_col": 26,
"end_line": 172,
"start_col": 2,
"start_line": 172
} |
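The record above defines `wp_block`: the weakest precondition of a block is computed by applying the state-indexed `qcs` to the entry state `s0` and then running the structural `wp` over the resulting list, again at `s0`. A schematic unfolding for a one-element block, following the `QSeq` case of `wp` and the definition of `wp_Seq` quoted in the file context above (a worked sketch, not part of the record):

// Unfolding sketch for cs = [c] and qcs = fun _ -> QSeq r m qc (QEmpty ()):
//   wp_block qcs mods s0 k
// = wp [c] (QSeq r m qc (QEmpty ())) mods k s0
// = label r m (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq [] (QEmpty ()) mods k))
// where wp_Seq [] (QEmpty ()) mods k behaves as fun s0' _ -> k s0' ()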
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tAssertSquashLemma (p:Type0) = unit -> Ghost (squash p) (requires p) (ensures fun () -> p) | let tAssertSquashLemma (p: Type0) = | false | null | false | unit -> Ghost (squash p) (requires p) (ensures fun () -> p) | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Prims.unit",
"Prims.squash"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k)
val qWhile_proof
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_While b qc mods inv dec g0 s0 k)
(ensures fun (sM, f0, g) ->
eval_code (While (cmp_to_ocmp b) c) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a)
: quickCode a (While (cmp_to_ocmp b) c) =
QProc (While (cmp_to_ocmp b) c) mods (wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0)
///// Assert, Assume, AssertBy
let tAssertLemma (p:Type0) = unit -> Lemma (requires p) (ensures p)
val qAssertLemma (p:Type0) : tAssertLemma p
[@va_qattr]
let va_qAssert (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg e (fun () -> e) (qAssertLemma e) qcs
let tAssumeLemma (p:Type0) = unit -> Lemma (requires True) (ensures p)
val qAssumeLemma (p:Type0) : tAssumeLemma p
[@va_qattr]
let va_qAssume (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg True (fun () -> e) (qAssumeLemma e) qcs | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tAssertSquashLemma : p: Type0 -> Type0 | [] | Vale.PPC64LE.QuickCodes.tAssertSquashLemma | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Type0 -> Type0 | {
"end_col": 94,
"end_line": 322,
"start_col": 35,
"start_line": 322
} |
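The record above introduces `tAssertSquashLemma p`, the type of the lemma that `va_qAssertSquash` relies on: under the precondition `p` it produces a ghost witness of `squash p` for the rest of the block. The interface only declares `val qAssertSquashLemma`; a plausible implementation shape (hypothetical — the real definition lives in the corresponding .fst, which is not shown in this record) is:

// Hypothetical implementation shape: with p in the precondition, the unit
// value already refines to squash p.
let qAssertSquashLemma_sketch (p:Type0) : tAssertSquashLemma p =
  fun () -> ()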
Prims.Tot | val va_QSeq
(#a: Type0)
(#b: Type)
(#c: code)
(#cs: codes)
(r: range)
(msg: string)
(qc: quickCode b c)
(qcs: quickCodes a cs)
: quickCodes a (c :: cs) | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs | val va_QSeq
(#a: Type0)
(#b: Type)
(#c: code)
(#cs: codes)
(r: range)
(msg: string)
(qc: quickCode b c)
(qcs: quickCodes a cs)
: quickCodes a (c :: cs)
let va_QSeq
(#a: Type0)
(#b: Type)
(#c: code)
(#cs: codes)
(r: range)
(msg: string)
(qc: quickCode b c)
(qcs: quickCodes a cs)
: quickCodes a (c :: cs) = | false | null | false | QSeq r msg qc qcs | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.code",
"Vale.PPC64LE.QuickCodes.codes",
"FStar.Range.range",
"Prims.string",
"Vale.PPC64LE.QuickCode.quickCode",
"Vale.PPC64LE.QuickCodes.quickCodes",
"Vale.PPC64LE.QuickCodes.QSeq",
"Prims.Cons",
"Vale.PPC64LE.Decls.va_code"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_QSeq
(#a: Type0)
(#b: Type)
(#c: code)
(#cs: codes)
(r: range)
(msg: string)
(qc: quickCode b c)
(qcs: quickCodes a cs)
: quickCodes a (c :: cs) | [] | Vale.PPC64LE.QuickCodes.va_QSeq | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
r: FStar.Range.range ->
msg: Prims.string ->
qc: Vale.PPC64LE.QuickCode.quickCode b c ->
qcs: Vale.PPC64LE.QuickCodes.quickCodes a cs
-> Vale.PPC64LE.QuickCodes.quickCodes a (c :: cs) | {
"end_col": 180,
"end_line": 75,
"start_col": 163,
"start_line": 75
} |
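The record above covers `va_QSeq`, the result-discarding counterpart of `va_QBind`: the tail is a plain `quickCodes` value rather than a function of the intermediate state and result. A minimal hypothetical sketch, reusing the assumed `va_code_F ()`/`va_quick_F ()` stand-ins from the `va_QBind` sketch earlier:

// Hypothetical sketch: F's intermediate state and result are dropped,
// so the tail is given directly rather than as a continuation.
let sketch_seq (r:range) : quickCodes unit [va_code_F ()] =
  va_QSeq r "call F" (va_quick_F ()) (va_QEmpty ())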
Prims.Tot | val va_qAssertSquash
(#a: Type)
(#cs: codes)
(r: range)
(msg: string)
(e: Type0)
(qcs: (squash e -> GTot (quickCodes a cs)))
: quickCodes a ((Block []) :: cs) | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_qAssertSquash
(#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:squash e -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QGhost (squash e) r msg e (fun () -> e) (qAssertSquashLemma e) qcs | val va_qAssertSquash
(#a: Type)
(#cs: codes)
(r: range)
(msg: string)
(e: Type0)
(qcs: (squash e -> GTot (quickCodes a cs)))
: quickCodes a ((Block []) :: cs)
let va_qAssertSquash
(#a: Type)
(#cs: codes)
(r: range)
(msg: string)
(e: Type0)
(qcs: (squash e -> GTot (quickCodes a cs)))
: quickCodes a ((Block []) :: cs) = | false | null | false | QGhost (squash e) r msg e (fun () -> e) (qAssertSquashLemma e) qcs | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.codes",
"FStar.Range.range",
"Prims.string",
"Prims.squash",
"Vale.PPC64LE.QuickCodes.quickCodes",
"Vale.PPC64LE.QuickCodes.QGhost",
"Prims.unit",
"Vale.PPC64LE.QuickCodes.qAssertSquashLemma",
"Prims.Cons",
"Vale.PPC64LE.Decls.va_code",
"Vale.PPC64LE.Machine_s.Block",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.ocmp",
"Prims.Nil",
"Vale.PPC64LE.Machine_s.precode"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k)
val qWhile_proof
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_While b qc mods inv dec g0 s0 k)
(ensures fun (sM, f0, g) ->
eval_code (While (cmp_to_ocmp b) c) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a)
: quickCode a (While (cmp_to_ocmp b) c) =
QProc (While (cmp_to_ocmp b) c) mods (wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0)
///// Assert, Assume, AssertBy
let tAssertLemma (p:Type0) = unit -> Lemma (requires p) (ensures p)
val qAssertLemma (p:Type0) : tAssertLemma p
[@va_qattr]
let va_qAssert (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg e (fun () -> e) (qAssertLemma e) qcs
let tAssumeLemma (p:Type0) = unit -> Lemma (requires True) (ensures p)
val qAssumeLemma (p:Type0) : tAssumeLemma p
[@va_qattr]
let va_qAssume (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg True (fun () -> e) (qAssumeLemma e) qcs
let tAssertSquashLemma (p:Type0) = unit -> Ghost (squash p) (requires p) (ensures fun () -> p)
val qAssertSquashLemma (p:Type0) : tAssertSquashLemma p
[@va_qattr]
let va_qAssertSquash
(#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:squash e -> GTot (quickCodes a cs)) | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_qAssertSquash
(#a: Type)
(#cs: codes)
(r: range)
(msg: string)
(e: Type0)
(qcs: (squash e -> GTot (quickCodes a cs)))
: quickCodes a ((Block []) :: cs) | [] | Vale.PPC64LE.QuickCodes.va_qAssertSquash | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
r: FStar.Range.range ->
msg: Prims.string ->
e: Type0 ->
qcs: (_: Prims.squash e -> Prims.GTot (Vale.PPC64LE.QuickCodes.quickCodes a cs))
-> Vale.PPC64LE.QuickCodes.quickCodes a (Vale.PPC64LE.Machine_s.Block [] :: cs) | {
"end_col": 68,
"end_line": 329,
"start_col": 2,
"start_line": 329
} |
Prims.Tot | val state_match (s0 s1: va_state) : Type0 | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let state_match (s0:va_state) (s1:va_state) : Type0 =
s0.ok == s1.ok /\
Regs.equal s0.regs s1.regs /\
Vecs.equal s0.vecs s1.vecs /\
s0.cr0 == s1.cr0 /\
s0.xer == s1.xer /\
s0.ms_heap == s1.ms_heap /\
s0.ms_stack == s1.ms_stack /\
s0.ms_stackTaint == s1.ms_stackTaint | val state_match (s0 s1: va_state) : Type0
let state_match (s0 s1: va_state) : Type0 = | false | null | false | s0.ok == s1.ok /\ Regs.equal s0.regs s1.regs /\ Vecs.equal s0.vecs s1.vecs /\ s0.cr0 == s1.cr0 /\
s0.xer == s1.xer /\ s0.ms_heap == s1.ms_heap /\ s0.ms_stack == s1.ms_stack /\
s0.ms_stackTaint == s1.ms_stackTaint | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Prims.l_and",
"Prims.eq2",
"Prims.bool",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ok",
"Vale.PPC64LE.Regs.equal",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__regs",
"Vale.PPC64LE.Vecs.equal",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__vecs",
"Vale.PPC64LE.Machine_s.cr0_t",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__cr0",
"Vale.PPC64LE.Machine_s.xer_t",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__xer",
"Vale.Arch.Heap.heap_impl",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.PPC64LE.Machine_s.machine_stack",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stack",
"Vale.Arch.HeapTypes_s.memTaint_t",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k)
val qWhile_proof
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_While b qc mods inv dec g0 s0 k)
(ensures fun (sM, f0, g) ->
eval_code (While (cmp_to_ocmp b) c) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a)
: quickCode a (While (cmp_to_ocmp b) c) =
QProc (While (cmp_to_ocmp b) c) mods (wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0)
///// Assert, Assume, AssertBy
let tAssertLemma (p:Type0) = unit -> Lemma (requires p) (ensures p)
val qAssertLemma (p:Type0) : tAssertLemma p
[@va_qattr]
let va_qAssert (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg e (fun () -> e) (qAssertLemma e) qcs
let tAssumeLemma (p:Type0) = unit -> Lemma (requires True) (ensures p)
val qAssumeLemma (p:Type0) : tAssumeLemma p
[@va_qattr]
let va_qAssume (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg True (fun () -> e) (qAssumeLemma e) qcs
let tAssertSquashLemma (p:Type0) = unit -> Ghost (squash p) (requires p) (ensures fun () -> p)
val qAssertSquashLemma (p:Type0) : tAssertSquashLemma p
[@va_qattr]
let va_qAssertSquash
(#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:squash e -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QGhost (squash e) r msg e (fun () -> e) (qAssertSquashLemma e) qcs
//let tAssertByLemma (#a:Type) (p:Type0) (qcs:quickCodes a []) (mods:mods_t) (s0:state) =
// unit -> Lemma (requires t_require s0 /\ wp [] qcs mods (fun _ _ -> p) s0) (ensures p)
//val qAssertByLemma (#a:Type) (p:Type0) (qcs:quickCodes a []) (mods:mods_t) (s0:state) : tAssertByLemma p qcs mods s0
//
//[@va_qattr]
//let va_qAssertBy (#a:Type) (#cs:codes) (mods:mods_t) (r:range) (msg:string) (p:Type0) (qcsBy:quickCodes unit []) (s0:state) (qcsTail:quickCodes a cs) : quickCodes a cs =
// QLemma r msg (t_require s0 /\ wp [] qcsBy mods (fun _ _ -> p) s0) (fun () -> p) (qAssertByLemma p qcsBy mods s0) qcsTail
[@va_qattr]
let va_qAssertBy (#a:Type) (#cs:codes) (r:range) (msg:string) (p:Type0) (qcsBy:quickCodes unit []) (qcsTail:quickCodes a cs) : quickCodes a cs =
QAssertBy r msg p qcsBy qcsTail
///// Code
val wp_sound_code (#a:Type0) (c:code) (qc:quickCode a c) (k:va_state -> a -> Type0) (s0:va_state) :
Ghost (va_state & fuel & a)
(requires t_require s0 /\ QProc?.wp qc s0 k)
(ensures fun (sN, fN, gN) -> eval_code c s0 fN sN /\ update_state_mods qc.mods sN s0 == sN /\ state_inv sN /\ k sN gN)
[@va_qattr] | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val state_match (s0 s1: va_state) : Type0 | [] | Vale.PPC64LE.QuickCodes.state_match | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s0: Vale.PPC64LE.Decls.va_state -> s1: Vale.PPC64LE.Decls.va_state -> Type0 | {
"end_col": 38,
"end_line": 359,
"start_col": 2,
"start_line": 352
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let fuel = va_fuel | let fuel = | false | null | false | va_fuel | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_fuel"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val fuel : Type0 | [] | Vale.PPC64LE.QuickCodes.fuel | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 25,
"end_line": 16,
"start_col": 18,
"start_line": 16
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let eval = eval_code | let eval = | false | null | false | eval_code | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.eval_code"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val eval : c: Vale.PPC64LE.Decls.va_code ->
s0: Vale.PPC64LE.Decls.va_state ->
f0: Vale.PPC64LE.Decls.va_fuel ->
sN: Vale.PPC64LE.Decls.va_state
-> Vale.Def.Prop_s.prop0 | [] | Vale.PPC64LE.QuickCodes.eval | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
c: Vale.PPC64LE.Decls.va_code ->
s0: Vale.PPC64LE.Decls.va_state ->
f0: Vale.PPC64LE.Decls.va_fuel ->
sN: Vale.PPC64LE.Decls.va_state
-> Vale.Def.Prop_s.prop0 | {
"end_col": 27,
"end_line": 17,
"start_col": 18,
"start_line": 17
} |
|
Prims.GTot | val labeled_wrap (r: range) (msg: string) (p: Type0) : GTot Type0 | [
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p | val labeled_wrap (r: range) (msg: string) (p: Type0) : GTot Type0
let labeled_wrap (r: range) (msg: string) (p: Type0) : GTot Type0 = | false | null | false | labeled r msg p | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"sometrivial"
] | [
"FStar.Range.range",
"Prims.string",
"FStar.Range.labeled"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val labeled_wrap (r: range) (msg: string) (p: Type0) : GTot Type0 | [] | Vale.PPC64LE.QuickCodes.labeled_wrap | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | r: FStar.Range.range -> msg: Prims.string -> p: Type0 -> Prims.GTot Type0 | {
"end_col": 80,
"end_line": 20,
"start_col": 65,
"start_line": 20
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let wp_sound_code_post (#a:Type0) (#c:code) (qc:quickCode a c) (s0:va_state) (k:(s0':va_state{s0 == s0'}) -> va_state -> a -> Type0) ((sN:va_state), (fN:fuel), (gN:a)) : Type0 =
eval c s0 fN sN /\
update_state_mods qc.mods sN s0 == sN /\
state_inv sN /\
k s0 sN gN | let wp_sound_code_post
(#a: Type0)
(#c: code)
(qc: quickCode a c)
(s0: va_state)
(k: (s0': va_state{s0 == s0'} -> va_state -> a -> Type0))
((sN: va_state), (fN: fuel), (gN: a))
: Type0 = | false | null | false | eval c s0 fN sN /\ update_state_mods qc.mods sN s0 == sN /\ state_inv sN /\ k s0 sN gN | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.code",
"Vale.PPC64LE.QuickCode.quickCode",
"Vale.PPC64LE.Decls.va_state",
"Prims.eq2",
"FStar.Pervasives.Native.tuple3",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Decls.va_fuel",
"Prims.l_and",
"Vale.PPC64LE.QuickCodes.eval",
"Vale.PPC64LE.QuickCode.update_state_mods",
"Vale.PPC64LE.QuickCode.__proj__QProc__item__mods",
"Vale.PPC64LE.Decls.state_inv"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k)
val qWhile_proof
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_While b qc mods inv dec g0 s0 k)
(ensures fun (sM, f0, g) ->
eval_code (While (cmp_to_ocmp b) c) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a)
: quickCode a (While (cmp_to_ocmp b) c) =
QProc (While (cmp_to_ocmp b) c) mods (wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0)
///// Assert, Assume, AssertBy
let tAssertLemma (p:Type0) = unit -> Lemma (requires p) (ensures p)
val qAssertLemma (p:Type0) : tAssertLemma p
[@va_qattr]
let va_qAssert (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg e (fun () -> e) (qAssertLemma e) qcs
let tAssumeLemma (p:Type0) = unit -> Lemma (requires True) (ensures p)
val qAssumeLemma (p:Type0) : tAssumeLemma p
[@va_qattr]
let va_qAssume (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg True (fun () -> e) (qAssumeLemma e) qcs
let tAssertSquashLemma (p:Type0) = unit -> Ghost (squash p) (requires p) (ensures fun () -> p)
val qAssertSquashLemma (p:Type0) : tAssertSquashLemma p
[@va_qattr]
let va_qAssertSquash
(#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:squash e -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QGhost (squash e) r msg e (fun () -> e) (qAssertSquashLemma e) qcs
//let tAssertByLemma (#a:Type) (p:Type0) (qcs:quickCodes a []) (mods:mods_t) (s0:state) =
// unit -> Lemma (requires t_require s0 /\ wp [] qcs mods (fun _ _ -> p) s0) (ensures p)
//val qAssertByLemma (#a:Type) (p:Type0) (qcs:quickCodes a []) (mods:mods_t) (s0:state) : tAssertByLemma p qcs mods s0
//
//[@va_qattr]
//let va_qAssertBy (#a:Type) (#cs:codes) (mods:mods_t) (r:range) (msg:string) (p:Type0) (qcsBy:quickCodes unit []) (s0:state) (qcsTail:quickCodes a cs) : quickCodes a cs =
// QLemma r msg (t_require s0 /\ wp [] qcsBy mods (fun _ _ -> p) s0) (fun () -> p) (qAssertByLemma p qcsBy mods s0) qcsTail
[@va_qattr]
let va_qAssertBy (#a:Type) (#cs:codes) (r:range) (msg:string) (p:Type0) (qcsBy:quickCodes unit []) (qcsTail:quickCodes a cs) : quickCodes a cs =
QAssertBy r msg p qcsBy qcsTail
///// Code
val wp_sound_code (#a:Type0) (c:code) (qc:quickCode a c) (k:va_state -> a -> Type0) (s0:va_state) :
Ghost (va_state & fuel & a)
(requires t_require s0 /\ QProc?.wp qc s0 k)
(ensures fun (sN, fN, gN) -> eval_code c s0 fN sN /\ update_state_mods qc.mods sN s0 == sN /\ state_inv sN /\ k sN gN)
[@va_qattr]
let state_match (s0:va_state) (s1:va_state) : Type0 =
s0.ok == s1.ok /\
Regs.equal s0.regs s1.regs /\
Vecs.equal s0.vecs s1.vecs /\
s0.cr0 == s1.cr0 /\
s0.xer == s1.xer /\
s0.ms_heap == s1.ms_heap /\
s0.ms_stack == s1.ms_stack /\
s0.ms_stackTaint == s1.ms_stackTaint
val lemma_state_match (s0:va_state) (s1:va_state) : Lemma
(requires state_match s0 s1)
(ensures state_eq s0 s1)
[@va_qattr]
let va_state_match (s0:va_state) (s1:va_state) : Pure Type0
(requires True)
(ensures fun b -> b ==> state_eq s0 s1)
=
FStar.Classical.move_requires (lemma_state_match s0) s1;
state_match s0 s1
[@va_qattr]
unfold let wp_sound_code_pre (#a:Type0) (#c:code) (qc:quickCode a c) (s0:va_state) (k:(s0':va_state{s0 == s0'}) -> va_state -> a -> Type0) : Type0 =
forall
(ok:bool)
(regs:Regs.t)
(vecs:Vecs.t)
(cr0:cr0_t)
(xer:xer_t)
//(mem:vale_full_heap) // splitting mem into its components makes the VCs slightly cleaner:
(mem_layout:vale_heap_layout)
(mem_heap:vale_heap)
(mem_heaplets:vale_heaplets)
(stack:machine_stack)
(stackTaint:memtaint)
.
let mem = {
vf_layout = mem_layout;
vf_heap = mem_heap;
vf_heaplets = mem_heaplets;
} in
let s0' = {
ok = ok;
regs = regs;
vecs = vecs;
cr0 = cr0;
xer = xer;
ms_heap = coerce mem;
ms_stack = stack;
ms_stackTaint = stackTaint
} in
s0 == s0' ==> QProc?.wp qc (state_eta s0') (k (state_eta s0')) | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val wp_sound_code_post : qc: Vale.PPC64LE.QuickCode.quickCode a c ->
s0: Vale.PPC64LE.Decls.va_state ->
k:
(s0': Vale.PPC64LE.Decls.va_state{s0 == s0'} -> _: Vale.PPC64LE.Decls.va_state -> _: a
-> Type0) ->
_: ((Vale.PPC64LE.State.state * Vale.PPC64LE.Decls.va_fuel) * a)
-> Type0 | [] | Vale.PPC64LE.QuickCodes.wp_sound_code_post | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
qc: Vale.PPC64LE.QuickCode.quickCode a c ->
s0: Vale.PPC64LE.Decls.va_state ->
k:
(s0': Vale.PPC64LE.Decls.va_state{s0 == s0'} -> _: Vale.PPC64LE.Decls.va_state -> _: a
-> Type0) ->
_: ((Vale.PPC64LE.State.state * Vale.PPC64LE.Decls.va_fuel) * a)
-> Type0 | {
"end_col": 12,
"end_line": 409,
"start_col": 2,
"start_line": 406
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let k_AssertBy (p:Type0) (_:va_state) () = p | let k_AssertBy (p: Type0) (_: va_state) () = | false | null | false | p | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Prims.unit"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0 | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val k_AssertBy : p: Type0 -> _: Vale.PPC64LE.Decls.va_state -> _: Prims.unit -> Type0 | [] | Vale.PPC64LE.QuickCodes.k_AssertBy | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Type0 -> _: Vale.PPC64LE.Decls.va_state -> _: Prims.unit -> Type0 | {
"end_col": 44,
"end_line": 101,
"start_col": 43,
"start_line": 101
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_range1 = mk_range "" 0 0 0 0 | let va_range1 = | false | null | false | mk_range "" 0 0 0 0 | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"FStar.Range.mk_range"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_range1 : FStar.Range.range | [] | Vale.PPC64LE.QuickCodes.va_range1 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | FStar.Range.range | {
"end_col": 35,
"end_line": 104,
"start_col": 16,
"start_line": 104
} |
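Editor's note (not a dataset row): every record in this dump carries a vconfig block recording the F* and Z3 settings the definition was checked under (fuel and ifuel bounds, z3rlimit, z3cliopt entries such as smt.arith.nl=false, the Z3 seed, and so on). The sketch below shows one way such a block could be turned back into F* command-line options for replaying a check. It covers only flags I am confident exist in mainline F* (--initial_fuel, --max_fuel, --initial_ifuel, --max_ifuel, --z3rlimit, --z3seed, --z3refresh, --z3cliopt), deliberately ignores the remaining vconfig entries, and the exact mapping should be verified against the F* version actually in use.

def vconfig_to_fstar_flags(vc: dict) -> list:
    """Best-effort translation of a vconfig block into a subset of F* CLI flags."""
    flags = [
        "--initial_fuel", str(vc["initial_fuel"]),
        "--max_fuel", str(vc["max_fuel"]),
        "--initial_ifuel", str(vc["initial_ifuel"]),
        "--max_ifuel", str(vc["max_ifuel"]),
        "--z3rlimit", str(vc["z3rlimit"]),
        "--z3seed", str(vc["z3seed"]),
    ]
    if vc.get("z3refresh"):
        flags.append("--z3refresh")
    for opt in vc.get("z3cliopt", []):   # e.g. "smt.arith.nl=false"
        flags += ["--z3cliopt", opt]
    return flags

# With the values shown in the records above this yields, among others,
# --max_fuel 1 --max_ifuel 1 --z3rlimit 5 --z3cliopt smt.arith.nl=false ...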
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let code = va_code | let code = | false | null | false | va_code | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_code"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val code : Type0 | [] | Vale.PPC64LE.QuickCodes.code | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 25,
"end_line": 14,
"start_col": 18,
"start_line": 14
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let wp_Seq_t (a:Type0) = va_state -> a -> Type0 | let wp_Seq_t (a: Type0) = | false | null | false | va_state -> a -> Type0 | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val wp_Seq_t : a: Type0 -> Type | [] | Vale.PPC64LE.QuickCodes.wp_Seq_t | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: Type0 -> Type | {
"end_col": 47,
"end_line": 99,
"start_col": 25,
"start_line": 99
} |
|
Prims.Tot | val normal_steps:list string | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let normal_steps : list string =
[
`%Mkstate?.ok;
`%Mkstate?.regs;
`%Mkstate?.vecs;
`%Mkstate?.cr0;
`%Mkstate?.xer;
`%Mkstate?.ms_heap;
`%Mkstate?.ms_stack;
`%Mkstate?.ms_stackTaint;
`%Mkvale_full_heap?.vf_layout;
`%Mkvale_full_heap?.vf_heap;
`%Mkvale_full_heap?.vf_heaplets;
`%QProc?.wp;
`%QProc?.mods;
`%FStar.FunctionalExtensionality.on_dom;
] | val normal_steps:list string
let normal_steps:list string = | false | null | false | [
`%Mkstate?.ok; `%Mkstate?.regs; `%Mkstate?.vecs; `%Mkstate?.cr0; `%Mkstate?.xer;
`%Mkstate?.ms_heap; `%Mkstate?.ms_stack; `%Mkstate?.ms_stackTaint; `%Mkvale_full_heap?.vf_layout;
`%Mkvale_full_heap?.vf_heap; `%Mkvale_full_heap?.vf_heaplets; `%QProc?.wp; `%QProc?.mods;
`%FStar.FunctionalExtensionality.on_dom
] | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Prims.Cons",
"Prims.string",
"Prims.Nil"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k)
val qWhile_proof
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_While b qc mods inv dec g0 s0 k)
(ensures fun (sM, f0, g) ->
eval_code (While (cmp_to_ocmp b) c) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a)
: quickCode a (While (cmp_to_ocmp b) c) =
QProc (While (cmp_to_ocmp b) c) mods (wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0)
///// Assert, Assume, AssertBy
let tAssertLemma (p:Type0) = unit -> Lemma (requires p) (ensures p)
val qAssertLemma (p:Type0) : tAssertLemma p
[@va_qattr]
let va_qAssert (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg e (fun () -> e) (qAssertLemma e) qcs
let tAssumeLemma (p:Type0) = unit -> Lemma (requires True) (ensures p)
val qAssumeLemma (p:Type0) : tAssumeLemma p
[@va_qattr]
let va_qAssume (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg True (fun () -> e) (qAssumeLemma e) qcs
let tAssertSquashLemma (p:Type0) = unit -> Ghost (squash p) (requires p) (ensures fun () -> p)
val qAssertSquashLemma (p:Type0) : tAssertSquashLemma p
[@va_qattr]
let va_qAssertSquash
(#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:squash e -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QGhost (squash e) r msg e (fun () -> e) (qAssertSquashLemma e) qcs
//let tAssertByLemma (#a:Type) (p:Type0) (qcs:quickCodes a []) (mods:mods_t) (s0:state) =
// unit -> Lemma (requires t_require s0 /\ wp [] qcs mods (fun _ _ -> p) s0) (ensures p)
//val qAssertByLemma (#a:Type) (p:Type0) (qcs:quickCodes a []) (mods:mods_t) (s0:state) : tAssertByLemma p qcs mods s0
//
//[@va_qattr]
//let va_qAssertBy (#a:Type) (#cs:codes) (mods:mods_t) (r:range) (msg:string) (p:Type0) (qcsBy:quickCodes unit []) (s0:state) (qcsTail:quickCodes a cs) : quickCodes a cs =
// QLemma r msg (t_require s0 /\ wp [] qcsBy mods (fun _ _ -> p) s0) (fun () -> p) (qAssertByLemma p qcsBy mods s0) qcsTail
[@va_qattr]
let va_qAssertBy (#a:Type) (#cs:codes) (r:range) (msg:string) (p:Type0) (qcsBy:quickCodes unit []) (qcsTail:quickCodes a cs) : quickCodes a cs =
QAssertBy r msg p qcsBy qcsTail
///// Code
val wp_sound_code (#a:Type0) (c:code) (qc:quickCode a c) (k:va_state -> a -> Type0) (s0:va_state) :
Ghost (va_state & fuel & a)
(requires t_require s0 /\ QProc?.wp qc s0 k)
(ensures fun (sN, fN, gN) -> eval_code c s0 fN sN /\ update_state_mods qc.mods sN s0 == sN /\ state_inv sN /\ k sN gN)
[@va_qattr]
let state_match (s0:va_state) (s1:va_state) : Type0 =
s0.ok == s1.ok /\
Regs.equal s0.regs s1.regs /\
Vecs.equal s0.vecs s1.vecs /\
s0.cr0 == s1.cr0 /\
s0.xer == s1.xer /\
s0.ms_heap == s1.ms_heap /\
s0.ms_stack == s1.ms_stack /\
s0.ms_stackTaint == s1.ms_stackTaint
val lemma_state_match (s0:va_state) (s1:va_state) : Lemma
(requires state_match s0 s1)
(ensures state_eq s0 s1)
[@va_qattr]
let va_state_match (s0:va_state) (s1:va_state) : Pure Type0
(requires True)
(ensures fun b -> b ==> state_eq s0 s1)
=
FStar.Classical.move_requires (lemma_state_match s0) s1;
state_match s0 s1
[@va_qattr]
unfold let wp_sound_code_pre (#a:Type0) (#c:code) (qc:quickCode a c) (s0:va_state) (k:(s0':va_state{s0 == s0'}) -> va_state -> a -> Type0) : Type0 =
forall
(ok:bool)
(regs:Regs.t)
(vecs:Vecs.t)
(cr0:cr0_t)
(xer:xer_t)
//(mem:vale_full_heap) // splitting mem into its components makes the VCs slightly cleaner:
(mem_layout:vale_heap_layout)
(mem_heap:vale_heap)
(mem_heaplets:vale_heaplets)
(stack:machine_stack)
(stackTaint:memtaint)
.
let mem = {
vf_layout = mem_layout;
vf_heap = mem_heap;
vf_heaplets = mem_heaplets;
} in
let s0' = {
ok = ok;
regs = regs;
vecs = vecs;
cr0 = cr0;
xer = xer;
ms_heap = coerce mem;
ms_stack = stack;
ms_stackTaint = stackTaint
} in
s0 == s0' ==> QProc?.wp qc (state_eta s0') (k (state_eta s0'))
unfold let wp_sound_code_post (#a:Type0) (#c:code) (qc:quickCode a c) (s0:va_state) (k:(s0':va_state{s0 == s0'}) -> va_state -> a -> Type0) ((sN:va_state), (fN:fuel), (gN:a)) : Type0 =
eval c s0 fN sN /\
update_state_mods qc.mods sN s0 == sN /\
state_inv sN /\
k s0 sN gN | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val normal_steps:list string | [] | Vale.PPC64LE.QuickCodes.normal_steps | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Prims.list Prims.string | {
"end_col": 3,
"end_line": 427,
"start_col": 2,
"start_line": 412
} |
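Editor's note (not a dataset row): the records in this section all come from the same interface file, and their source_range blocks locate each definition (wp_sound_code_post, k_AssertBy, va_range1, code, wp_Seq_t, normal_steps, ...) within Vale.PPC64LE.QuickCodes.fsti. Under the same hypothetical JSON Lines layout assumed earlier, a short sketch that lists the covered definitions in file order; the top-level key names file_name, name, effect, and source_range are again assumptions for illustration.

import json

defs = []
with open("records.jsonl", encoding="utf-8") as f:
    for line in f:
        rec = json.loads(line)
        # Keep only records extracted from this interface file.
        if rec.get("file_name", "").endswith("Vale.PPC64LE.QuickCodes.fsti"):
            defs.append((rec["source_range"]["start_line"], rec["name"], rec["effect"]))

# Print the definitions sorted by where they start in the .fsti file.
for start_line, name, effect in sorted(defs):
    print(f"{start_line:5d}  {effect:12s}  {name}")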
Prims.Tot | val wp_While_inv
(#a #d: Type)
(#c: code)
(qc: (a -> quickCode a c))
(mods: mods_t)
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(s1: va_state)
(g1: a)
(s2: va_state)
(g2: a)
: Type0 | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1 | val wp_While_inv
(#a #d: Type)
(#c: code)
(qc: (a -> quickCode a c))
(mods: mods_t)
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(s1: va_state)
(g1: a)
(s2: va_state)
(g2: a)
: Type0
let wp_While_inv
(#a #d: Type)
(#c: code)
(qc: (a -> quickCode a c))
(mods: mods_t)
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(s1: va_state)
(g1: a)
(s2: va_state)
(g2: a)
: Type0 = | false | null | false | s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1 | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.code",
"Vale.PPC64LE.QuickCode.quickCode",
"Vale.PPC64LE.QuickCode.mods_t",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_and",
"Prims.b2t",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ok",
"Vale.PPC64LE.QuickCodes.mods_contains",
"Vale.PPC64LE.QuickCode.__proj__QProc__item__mods",
"Prims.precedes"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
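(* Illustrative sketch (added for exposition, not in the Vale sources): unfolding
   the definitions above on a one-element code list shows how the continuation k
   is threaded backwards through each quickCode. Up to the label wrapper,
     wp [c] (QSeq r msg qc (QEmpty ())) mods k s0
   reduces to
     mods_contains mods qc.mods /\ QProc?.wp qc s0 (fun s1 _ -> k s1 ())
   where the ignored argument is qc's own return value. *)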
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a) | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val wp_While_inv
(#a #d: Type)
(#c: code)
(qc: (a -> quickCode a c))
(mods: mods_t)
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(s1: va_state)
(g1: a)
(s2: va_state)
(g2: a)
: Type0 | [] | Vale.PPC64LE.QuickCodes.wp_While_inv | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
qc: (_: a -> Vale.PPC64LE.QuickCode.quickCode a c) ->
mods: Vale.PPC64LE.QuickCode.mods_t ->
inv: (_: Vale.PPC64LE.Decls.va_state -> _: a -> Type0) ->
dec: (_: Vale.PPC64LE.Decls.va_state -> _: a -> d) ->
s1: Vale.PPC64LE.Decls.va_state ->
g1: a ->
s2: Vale.PPC64LE.Decls.va_state ->
g2: a
-> Type0 | {
"end_col": 81,
"end_line": 268,
"start_col": 2,
"start_line": 268
} |
Prims.Tot | val wp_proc (#a: Type0) (c: code) (qc: quickCode a c) (s0: va_state) (k: (va_state -> a -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k | val wp_proc (#a: Type0) (c: code) (qc: quickCode a c) (s0: va_state) (k: (va_state -> a -> Type0))
: Type0
let wp_proc (#a: Type0) (c: code) (qc: quickCode a c) (s0: va_state) (k: (va_state -> a -> Type0))
: Type0 = | false | null | false | match qc with | QProc _ _ wp _ -> wp s0 k | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.code",
"Vale.PPC64LE.QuickCode.quickCode",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_code",
"Vale.PPC64LE.QuickCode.mods_t",
"Vale.PPC64LE.QuickCode.quickProc_wp",
"Vale.PPC64LE.QuickCode.t_proof"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
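// Illustrative note (added for exposition, not in the Vale sources): by the first
// branch above, mods_contains1 [] m reduces to mod_eq Mod_None m, so an empty
// allowed-list only "contains" Mod_None, while mods_contains allowed [] is always
// true. Assuming mod_eq is the decidable equality on mod_t, an instance such as
// mods_contains1 [Mod_cr0] Mod_cr0 is expected to normalize to true.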
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr] | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val wp_proc (#a: Type0) (c: code) (qc: quickCode a c) (s0: va_state) (k: (va_state -> a -> Type0))
: Type0 | [] | Vale.PPC64LE.QuickCodes.wp_proc | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
c: Vale.PPC64LE.QuickCodes.code ->
qc: Vale.PPC64LE.QuickCode.quickCode a c ->
s0: Vale.PPC64LE.Decls.va_state ->
k: (_: Vale.PPC64LE.Decls.va_state -> _: a -> Type0)
-> Type0 | {
"end_col": 29,
"end_line": 97,
"start_col": 2,
"start_line": 96
} |
Prims.Tot | val va_qAssertBy
(#a: Type)
(#cs: codes)
(r: range)
(msg: string)
(p: Type0)
(qcsBy: quickCodes unit [])
(qcsTail: quickCodes a cs)
: quickCodes a cs | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_qAssertBy (#a:Type) (#cs:codes) (r:range) (msg:string) (p:Type0) (qcsBy:quickCodes unit []) (qcsTail:quickCodes a cs) : quickCodes a cs =
QAssertBy r msg p qcsBy qcsTail | val va_qAssertBy
(#a: Type)
(#cs: codes)
(r: range)
(msg: string)
(p: Type0)
(qcsBy: quickCodes unit [])
(qcsTail: quickCodes a cs)
: quickCodes a cs
let va_qAssertBy
(#a: Type)
(#cs: codes)
(r: range)
(msg: string)
(p: Type0)
(qcsBy: quickCodes unit [])
(qcsTail: quickCodes a cs)
: quickCodes a cs = | false | null | false | QAssertBy r msg p qcsBy qcsTail | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.codes",
"FStar.Range.range",
"Prims.string",
"Vale.PPC64LE.QuickCodes.quickCodes",
"Prims.unit",
"Prims.Nil",
"Vale.PPC64LE.Machine_s.precode",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.ocmp",
"Vale.PPC64LE.QuickCodes.QAssertBy"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
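    // Illustrative note (added for exposition, not in the Vale sources): read together
    // with k_AssertBy above, this case first proves p using only the auxiliary ghost
    // block qcsBy (whose continuation ignores the final state and just demands p),
    // and then assumes p while computing the WP of the remaining code qcs.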
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k)
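(* Illustrative sketch (added for exposition, not in the Vale sources): a typical
   instantiation lets the ghost value g carry the number of remaining iterations,
   e.g. d = nat and dec = (fun _ g -> g); the obligation dec s2 g2 << dec s1 g1 in
   wp_While_inv above then becomes g2 << g1, i.e. the count strictly decreases on
   every loop iteration, which is what justifies termination in qWhile_proof. *)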
val qWhile_proof
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_While b qc mods inv dec g0 s0 k)
(ensures fun (sM, f0, g) ->
eval_code (While (cmp_to_ocmp b) c) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a)
: quickCode a (While (cmp_to_ocmp b) c) =
QProc (While (cmp_to_ocmp b) c) mods (wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0)
///// Assert, Assume, AssertBy
let tAssertLemma (p:Type0) = unit -> Lemma (requires p) (ensures p)
val qAssertLemma (p:Type0) : tAssertLemma p
[@va_qattr]
let va_qAssert (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg e (fun () -> e) (qAssertLemma e) qcs
let tAssumeLemma (p:Type0) = unit -> Lemma (requires True) (ensures p)
val qAssumeLemma (p:Type0) : tAssumeLemma p
[@va_qattr]
let va_qAssume (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg True (fun () -> e) (qAssumeLemma e) qcs
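(* Illustrative note (added for exposition, not in the Vale sources): with the wp
   clause for QLemma, va_qAssert r msg e qcs roughly yields the proof obligation
     label r msg e /\ (e ==> wp cs qcs mods k s0)
   so e must be proved here and is then available to the rest of the block, while
   va_qAssume r msg e qcs only requires True and simply assumes e downstream. *)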
let tAssertSquashLemma (p:Type0) = unit -> Ghost (squash p) (requires p) (ensures fun () -> p)
val qAssertSquashLemma (p:Type0) : tAssertSquashLemma p
[@va_qattr]
let va_qAssertSquash
(#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:squash e -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QGhost (squash e) r msg e (fun () -> e) (qAssertSquashLemma e) qcs
//let tAssertByLemma (#a:Type) (p:Type0) (qcs:quickCodes a []) (mods:mods_t) (s0:state) =
// unit -> Lemma (requires t_require s0 /\ wp [] qcs mods (fun _ _ -> p) s0) (ensures p)
//val qAssertByLemma (#a:Type) (p:Type0) (qcs:quickCodes a []) (mods:mods_t) (s0:state) : tAssertByLemma p qcs mods s0
//
//[@va_qattr]
//let va_qAssertBy (#a:Type) (#cs:codes) (mods:mods_t) (r:range) (msg:string) (p:Type0) (qcsBy:quickCodes unit []) (s0:state) (qcsTail:quickCodes a cs) : quickCodes a cs =
// QLemma r msg (t_require s0 /\ wp [] qcsBy mods (fun _ _ -> p) s0) (fun () -> p) (qAssertByLemma p qcsBy mods s0) qcsTail
[@va_qattr] | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_qAssertBy
(#a: Type)
(#cs: codes)
(r: range)
(msg: string)
(p: Type0)
(qcsBy: quickCodes unit [])
(qcsTail: quickCodes a cs)
: quickCodes a cs | [] | Vale.PPC64LE.QuickCodes.va_qAssertBy | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
r: FStar.Range.range ->
msg: Prims.string ->
p: Type0 ->
qcsBy: Vale.PPC64LE.QuickCodes.quickCodes Prims.unit [] ->
qcsTail: Vale.PPC64LE.QuickCodes.quickCodes a cs
-> Vale.PPC64LE.QuickCodes.quickCodes a cs | {
"end_col": 33,
"end_line": 341,
"start_col": 2,
"start_line": 341
} |
Prims.Tot | val va_qAssert (#a: Type) (#cs: codes) (r: range) (msg: string) (e: Type0) (qcs: quickCodes a cs)
: quickCodes a cs | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_qAssert (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg e (fun () -> e) (qAssertLemma e) qcs | val va_qAssert (#a: Type) (#cs: codes) (r: range) (msg: string) (e: Type0) (qcs: quickCodes a cs)
: quickCodes a cs
let va_qAssert (#a: Type) (#cs: codes) (r: range) (msg: string) (e: Type0) (qcs: quickCodes a cs)
: quickCodes a cs = | false | null | false | QLemma r msg e (fun () -> e) (qAssertLemma e) qcs | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.codes",
"FStar.Range.range",
"Prims.string",
"Vale.PPC64LE.QuickCodes.quickCodes",
"Vale.PPC64LE.QuickCodes.QLemma",
"Prims.unit",
"Vale.PPC64LE.QuickCodes.qAssertLemma"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k)
val qWhile_proof
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_While b qc mods inv dec g0 s0 k)
(ensures fun (sM, f0, g) ->
eval_code (While (cmp_to_ocmp b) c) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a)
: quickCode a (While (cmp_to_ocmp b) c) =
QProc (While (cmp_to_ocmp b) c) mods (wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0)
///// Assert, Assume, AssertBy
let tAssertLemma (p:Type0) = unit -> Lemma (requires p) (ensures p)
val qAssertLemma (p:Type0) : tAssertLemma p
[@va_qattr] | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_qAssert (#a: Type) (#cs: codes) (r: range) (msg: string) (e: Type0) (qcs: quickCodes a cs)
: quickCodes a cs | [] | Vale.PPC64LE.QuickCodes.va_qAssert | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
r: FStar.Range.range ->
msg: Prims.string ->
e: Type0 ->
qcs: Vale.PPC64LE.QuickCodes.quickCodes a cs
-> Vale.PPC64LE.QuickCodes.quickCodes a cs | {
"end_col": 51,
"end_line": 313,
"start_col": 2,
"start_line": 313
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tAssertLemma (p:Type0) = unit -> Lemma (requires p) (ensures p) | let tAssertLemma (p: Type0) = | false | null | false | unit -> Lemma (requires p) (ensures p) | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Prims.unit",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
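// Hypothetical usage sketch, not part of the original file: mods_contains checks that
// every location a callee may modify (its footprint, "found") is also listed in the
// caller's declared frame ("allowed"). The example_* names below are made up.
let example_frame : mods_t = [Mod_cr0]
let example_empty_footprint_ok : bool = mods_contains example_frame []
let example_cr0_footprint_ok : bool = mods_contains example_frame [Mod_cr0]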
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
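// Hypothetical sketch, not part of the original file: for the empty program, the
// generated weakest precondition is just the continuation applied to the initial
// state and the returned ghost value, so this normalizes to k s0 0.
let example_wp_empty (k:va_state -> int -> Type0) (s0:va_state) : Type0 =
  wp [] (QEmpty 0) [] k s0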
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
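// Hypothetical sketch, not part of the original file: packaging a (here trivial)
// structured program back into a single quickCode with an empty frame.
let example_qblock : quickCode int (block []) =
  qblock [] (fun (_:va_state) -> QEmpty 0)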
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k)
val qWhile_proof
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_While b qc mods inv dec g0 s0 k)
(ensures fun (sM, f0, g) ->
eval_code (While (cmp_to_ocmp b) c) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a)
: quickCode a (While (cmp_to_ocmp b) c) =
QProc (While (cmp_to_ocmp b) c) mods (wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0)
///// Assert, Assume, AssertBy | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tAssertLemma : p: Type0 -> Type0 | [] | Vale.PPC64LE.QuickCodes.tAssertLemma | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Type0 -> Type0 | {
"end_col": 67,
"end_line": 308,
"start_col": 29,
"start_line": 308
} |
|
Prims.Tot | val normal (x: Type0) : Type0 | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let normal (x:Type0) : Type0 = norm [nbe; iota; zeta; simplify; primops; delta_attr [`%va_qattr]; delta_only normal_steps] x | val normal (x: Type0) : Type0
let normal (x: Type0) : Type0 = | false | null | false | norm [nbe; iota; zeta; simplify; primops; delta_attr [`%va_qattr]; delta_only normal_steps] x | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"FStar.Pervasives.norm",
"Prims.Cons",
"FStar.Pervasives.norm_step",
"FStar.Pervasives.nbe",
"FStar.Pervasives.iota",
"FStar.Pervasives.zeta",
"FStar.Pervasives.simplify",
"FStar.Pervasives.primops",
"FStar.Pervasives.delta_attr",
"Prims.string",
"Prims.Nil",
"FStar.Pervasives.delta_only",
"Vale.PPC64LE.QuickCodes.normal_steps"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k)
val qWhile_proof
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_While b qc mods inv dec g0 s0 k)
(ensures fun (sM, f0, g) ->
eval_code (While (cmp_to_ocmp b) c) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a)
: quickCode a (While (cmp_to_ocmp b) c) =
QProc (While (cmp_to_ocmp b) c) mods (wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0)
///// Assert, Assume, AssertBy
let tAssertLemma (p:Type0) = unit -> Lemma (requires p) (ensures p)
val qAssertLemma (p:Type0) : tAssertLemma p
[@va_qattr]
let va_qAssert (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg e (fun () -> e) (qAssertLemma e) qcs
let tAssumeLemma (p:Type0) = unit -> Lemma (requires True) (ensures p)
val qAssumeLemma (p:Type0) : tAssumeLemma p
[@va_qattr]
let va_qAssume (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg True (fun () -> e) (qAssumeLemma e) qcs
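// Hypothetical sketch, not part of the original file: assumptions and assertions are
// QLemma nodes threaded in front of the remaining program; the propositions and names
// below are placeholders.
let example_assume_then_assert : quickCodes int [] =
  va_qAssume va_range1 "assumed fact" (0 == 0) (
    va_qAssert va_range1 "checked fact" (0 == 0) (
      va_QEmpty 0))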
let tAssertSquashLemma (p:Type0) = unit -> Ghost (squash p) (requires p) (ensures fun () -> p)
val qAssertSquashLemma (p:Type0) : tAssertSquashLemma p
[@va_qattr]
let va_qAssertSquash
(#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:squash e -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QGhost (squash e) r msg e (fun () -> e) (qAssertSquashLemma e) qcs
//let tAssertByLemma (#a:Type) (p:Type0) (qcs:quickCodes a []) (mods:mods_t) (s0:state) =
// unit -> Lemma (requires t_require s0 /\ wp [] qcs mods (fun _ _ -> p) s0) (ensures p)
//val qAssertByLemma (#a:Type) (p:Type0) (qcs:quickCodes a []) (mods:mods_t) (s0:state) : tAssertByLemma p qcs mods s0
//
//[@va_qattr]
//let va_qAssertBy (#a:Type) (#cs:codes) (mods:mods_t) (r:range) (msg:string) (p:Type0) (qcsBy:quickCodes unit []) (s0:state) (qcsTail:quickCodes a cs) : quickCodes a cs =
// QLemma r msg (t_require s0 /\ wp [] qcsBy mods (fun _ _ -> p) s0) (fun () -> p) (qAssertByLemma p qcsBy mods s0) qcsTail
[@va_qattr]
let va_qAssertBy (#a:Type) (#cs:codes) (r:range) (msg:string) (p:Type0) (qcsBy:quickCodes unit []) (qcsTail:quickCodes a cs) : quickCodes a cs =
QAssertBy r msg p qcsBy qcsTail
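// Hypothetical sketch, not part of the original file: proving a fact via an auxiliary,
// code-free sub-derivation (here trivially empty) before continuing with the rest of
// the program.
let example_assert_by : quickCodes int [] =
  va_qAssertBy va_range1 "by sub-proof" (0 == 0) (va_QEmpty ()) (va_QEmpty 0)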
///// Code
val wp_sound_code (#a:Type0) (c:code) (qc:quickCode a c) (k:va_state -> a -> Type0) (s0:va_state) :
Ghost (va_state & fuel & a)
(requires t_require s0 /\ QProc?.wp qc s0 k)
(ensures fun (sN, fN, gN) -> eval_code c s0 fN sN /\ update_state_mods qc.mods sN s0 == sN /\ state_inv sN /\ k sN gN)
[@va_qattr]
let state_match (s0:va_state) (s1:va_state) : Type0 =
s0.ok == s1.ok /\
Regs.equal s0.regs s1.regs /\
Vecs.equal s0.vecs s1.vecs /\
s0.cr0 == s1.cr0 /\
s0.xer == s1.xer /\
s0.ms_heap == s1.ms_heap /\
s0.ms_stack == s1.ms_stack /\
s0.ms_stackTaint == s1.ms_stackTaint
val lemma_state_match (s0:va_state) (s1:va_state) : Lemma
(requires state_match s0 s1)
(ensures state_eq s0 s1)
[@va_qattr]
let va_state_match (s0:va_state) (s1:va_state) : Pure Type0
(requires True)
(ensures fun b -> b ==> state_eq s0 s1)
=
FStar.Classical.move_requires (lemma_state_match s0) s1;
state_match s0 s1
[@va_qattr]
unfold let wp_sound_code_pre (#a:Type0) (#c:code) (qc:quickCode a c) (s0:va_state) (k:(s0':va_state{s0 == s0'}) -> va_state -> a -> Type0) : Type0 =
forall
(ok:bool)
(regs:Regs.t)
(vecs:Vecs.t)
(cr0:cr0_t)
(xer:xer_t)
//(mem:vale_full_heap) // splitting mem into its components makes the VCs slightly cleaner:
(mem_layout:vale_heap_layout)
(mem_heap:vale_heap)
(mem_heaplets:vale_heaplets)
(stack:machine_stack)
(stackTaint:memtaint)
.
let mem = {
vf_layout = mem_layout;
vf_heap = mem_heap;
vf_heaplets = mem_heaplets;
} in
let s0' = {
ok = ok;
regs = regs;
vecs = vecs;
cr0 = cr0;
xer = xer;
ms_heap = coerce mem;
ms_stack = stack;
ms_stackTaint = stackTaint
} in
s0 == s0' ==> QProc?.wp qc (state_eta s0') (k (state_eta s0'))
unfold let wp_sound_code_post (#a:Type0) (#c:code) (qc:quickCode a c) (s0:va_state) (k:(s0':va_state{s0 == s0'}) -> va_state -> a -> Type0) ((sN:va_state), (fN:fuel), (gN:a)) : Type0 =
eval c s0 fN sN /\
update_state_mods qc.mods sN s0 == sN /\
state_inv sN /\
k s0 sN gN
unfold let normal_steps : list string =
[
`%Mkstate?.ok;
`%Mkstate?.regs;
`%Mkstate?.vecs;
`%Mkstate?.cr0;
`%Mkstate?.xer;
`%Mkstate?.ms_heap;
`%Mkstate?.ms_stack;
`%Mkstate?.ms_stackTaint;
`%Mkvale_full_heap?.vf_layout;
`%Mkvale_full_heap?.vf_heap;
`%Mkvale_full_heap?.vf_heaplets;
`%QProc?.wp;
`%QProc?.mods;
`%FStar.FunctionalExtensionality.on_dom;
] | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val normal (x: Type0) : Type0 | [] | Vale.PPC64LE.QuickCodes.normal | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Type0 -> Type0 | {
"end_col": 131,
"end_line": 429,
"start_col": 38,
"start_line": 429
} |
Prims.Tot | val wp_While
(#a #d: Type)
(#c: code)
(b: cmp)
(qc: (a -> quickCode a c))
(mods: mods_t)
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(g0: a)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k) | val wp_While
(#a #d: Type)
(#c: code)
(b: cmp)
(qc: (a -> quickCode a c))
(mods: mods_t)
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(g0: a)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0
let wp_While
(#a #d: Type)
(#c: code)
(b: cmp)
(qc: (a -> quickCode a c))
(mods: mods_t)
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(g0: a)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0 = | false | null | false | inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
(forall (s1: va_state) (g1: a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k) | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.code",
"Vale.PPC64LE.QuickCodes.cmp",
"Vale.PPC64LE.QuickCode.quickCode",
"Vale.PPC64LE.QuickCode.mods_t",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_and",
"Prims.b2t",
"Vale.PPC64LE.QuickCodes.mods_contains",
"Vale.PPC64LE.QuickCode.__proj__QProc__item__mods",
"Vale.PPC64LE.QuickCodes.mods_contains1",
"Vale.PPC64LE.QuickCode.Mod_cr0",
"Prims.l_Forall",
"Prims.l_imp",
"Vale.PPC64LE.QuickCodes.wp_While_body"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
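// Hypothetical sketch, not part of the original file: selecting between two trivial
// blocks on an inline boolean; the resulting code shape is if_code b (block []) (block []).
let example_inline_if (b:bool) : quickCode int (if_code b (block []) (block [])) =
  va_qInlineIf [] b (qblock [] (fun (_:va_state) -> QEmpty 0))
                    (qblock [] (fun (_:va_state) -> QEmpty 1))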
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0) | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val wp_While
(#a #d: Type)
(#c: code)
(b: cmp)
(qc: (a -> quickCode a c))
(mods: mods_t)
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(g0: a)
(s0: va_state)
(k: (va_state -> a -> Type0))
: Type0 | [] | Vale.PPC64LE.QuickCodes.wp_While | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b: Vale.PPC64LE.QuickCodes.cmp ->
qc: (_: a -> Vale.PPC64LE.QuickCode.quickCode a c) ->
mods: Vale.PPC64LE.QuickCode.mods_t ->
inv: (_: Vale.PPC64LE.Decls.va_state -> _: a -> Type0) ->
dec: (_: Vale.PPC64LE.Decls.va_state -> _: a -> d) ->
g0: a ->
s0: Vale.PPC64LE.Decls.va_state ->
k: (_: Vale.PPC64LE.Decls.va_state -> _: a -> Type0)
-> Type0 | {
"end_col": 86,
"end_line": 287,
"start_col": 2,
"start_line": 285
} |
Prims.Tot | val va_qAssume (#a: Type) (#cs: codes) (r: range) (msg: string) (e: Type0) (qcs: quickCodes a cs)
: quickCodes a cs | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_qAssume (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg True (fun () -> e) (qAssumeLemma e) qcs | val va_qAssume (#a: Type) (#cs: codes) (r: range) (msg: string) (e: Type0) (qcs: quickCodes a cs)
: quickCodes a cs
let va_qAssume (#a: Type) (#cs: codes) (r: range) (msg: string) (e: Type0) (qcs: quickCodes a cs)
: quickCodes a cs = | false | null | false | QLemma r msg True (fun () -> e) (qAssumeLemma e) qcs | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.codes",
"FStar.Range.range",
"Prims.string",
"Vale.PPC64LE.QuickCodes.quickCodes",
"Vale.PPC64LE.QuickCodes.QLemma",
"Prims.l_True",
"Prims.unit",
"Vale.PPC64LE.QuickCodes.qAssumeLemma"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k)
val qWhile_proof
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_While b qc mods inv dec g0 s0 k)
(ensures fun (sM, f0, g) ->
eval_code (While (cmp_to_ocmp b) c) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a)
: quickCode a (While (cmp_to_ocmp b) c) =
QProc (While (cmp_to_ocmp b) c) mods (wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0)
///// Assert, Assume, AssertBy
let tAssertLemma (p:Type0) = unit -> Lemma (requires p) (ensures p)
val qAssertLemma (p:Type0) : tAssertLemma p
[@va_qattr]
let va_qAssert (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg e (fun () -> e) (qAssertLemma e) qcs
let tAssumeLemma (p:Type0) = unit -> Lemma (requires True) (ensures p)
val qAssumeLemma (p:Type0) : tAssumeLemma p
[@va_qattr] | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_qAssume (#a: Type) (#cs: codes) (r: range) (msg: string) (e: Type0) (qcs: quickCodes a cs)
: quickCodes a cs | [] | Vale.PPC64LE.QuickCodes.va_qAssume | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
r: FStar.Range.range ->
msg: Prims.string ->
e: Type0 ->
qcs: Vale.PPC64LE.QuickCodes.quickCodes a cs
-> Vale.PPC64LE.QuickCodes.quickCodes a cs | {
"end_col": 54,
"end_line": 320,
"start_col": 2,
"start_line": 320
} |
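A minimal client-side sketch of the `va_qAssume` combinator defined in the record above. The module name Example.QAssume, the definition name example_assume, and the message string are illustrative only and not part of the dataset; the sketch assumes the Vale PPC64LE modules are available on the load path.

module Example.QAssume
open Vale.PPC64LE.QuickCodes

// Illustrative use of va_qAssume: inject the assumption `x > 0` into an otherwise
// empty quickCodes sequence. The assumption becomes a hypothesis for the remainder
// of the (here empty) sequence; va_range1 and the message are used for error reporting.
let example_assume (x:int) : quickCodes unit [] =
  va_qAssume va_range1 "assume: x is positive" (x > 0) (va_QEmpty ())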
Prims.Ghost | val label (r: range) (msg: string) (p: Type0)
: Ghost Type (requires True) (ensures fun q -> q <==> p) | [
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p | val label (r: range) (msg: string) (p: Type0)
: Ghost Type (requires True) (ensures fun q -> q <==> p)
let label (r: range) (msg: string) (p: Type0)
: Ghost Type (requires True) (ensures fun q -> q <==> p) = | false | null | false | assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [] | [
"FStar.Range.range",
"Prims.string",
"Vale.PPC64LE.QuickCodes.labeled_wrap",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.l_iff",
"Prims.l_True"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"] | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val label (r: range) (msg: string) (p: Type0)
: Ghost Type (requires True) (ensures fun q -> q <==> p) | [] | Vale.PPC64LE.QuickCodes.label | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | r: FStar.Range.range -> msg: Prims.string -> p: Type0 -> Prims.Ghost Type0 | {
"end_col": 22,
"end_line": 27,
"start_col": 2,
"start_line": 26
} |
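A small sketch showing how the `label` wrapper from this record is meant to be consumed: a labeled hypothesis is logically equivalent to the underlying fact, and `lemma_label_bool` (declared in the same interface) recovers it explicitly. The names Example.Label and example_recover are illustrative, not from the dataset.

module Example.Label
open Vale.PPC64LE.QuickCodes

// A labeled boolean hypothesis can be turned back into the boolean itself;
// the explicit call to lemma_label_bool is not strictly needed (it carries an
// SMTPat) but makes the reasoning step visible.
let example_recover (b:bool)
  : Lemma (requires label va_range1 "b must hold" b) (ensures b)
  = lemma_label_bool va_range1 "b must hold" b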
Prims.Tot | val va_qWhile
(#a #d: Type)
(#c: code)
(mods: mods_t)
(b: cmp)
(qc: (a -> quickCode a c))
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(g0: a)
: quickCode a (While (cmp_to_ocmp b) c) | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a)
: quickCode a (While (cmp_to_ocmp b) c) =
QProc (While (cmp_to_ocmp b) c) mods (wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0) | val va_qWhile
(#a #d: Type)
(#c: code)
(mods: mods_t)
(b: cmp)
(qc: (a -> quickCode a c))
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(g0: a)
: quickCode a (While (cmp_to_ocmp b) c)
let va_qWhile
(#a #d: Type)
(#c: code)
(mods: mods_t)
(b: cmp)
(qc: (a -> quickCode a c))
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(g0: a)
: quickCode a (While (cmp_to_ocmp b) c) = | false | null | false | QProc (While (cmp_to_ocmp b) c)
mods
(wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0) | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.code",
"Vale.PPC64LE.QuickCode.mods_t",
"Vale.PPC64LE.QuickCodes.cmp",
"Vale.PPC64LE.QuickCode.quickCode",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.QuickCode.QProc",
"Vale.PPC64LE.Machine_s.While",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.ocmp",
"Vale.PPC64LE.QuickCodes.cmp_to_ocmp",
"Vale.PPC64LE.QuickCodes.wp_While",
"Vale.PPC64LE.QuickCodes.qWhile_proof"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k)
val qWhile_proof
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_While b qc mods inv dec g0 s0 k)
(ensures fun (sM, f0, g) ->
eval_code (While (cmp_to_ocmp b) c) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_qWhile
(#a #d: Type)
(#c: code)
(mods: mods_t)
(b: cmp)
(qc: (a -> quickCode a c))
(inv: (va_state -> a -> Type0))
(dec: (va_state -> a -> d))
(g0: a)
: quickCode a (While (cmp_to_ocmp b) c) | [] | Vale.PPC64LE.QuickCodes.va_qWhile | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
mods: Vale.PPC64LE.QuickCode.mods_t ->
b: Vale.PPC64LE.QuickCodes.cmp ->
qc: (_: a -> Vale.PPC64LE.QuickCode.quickCode a c) ->
inv: (_: Vale.PPC64LE.Decls.va_state -> _: a -> Type0) ->
dec: (_: Vale.PPC64LE.Decls.va_state -> _: a -> d) ->
g0: a
-> Vale.PPC64LE.QuickCode.quickCode a
(Vale.PPC64LE.Machine_s.While (Vale.PPC64LE.QuickCodes.cmp_to_ocmp b) c) | {
"end_col": 39,
"end_line": 304,
"start_col": 2,
"start_line": 303
} |
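A shape-only sketch of assembling a while-loop quickCode with `va_qWhile` from the record above. Every `assume val` below is a placeholder (loop body, condition, invariant, decreasing measure) and none of those names come from the dataset; Mod_cr0 is listed in the footprint because `wp_While` requires it, the condition test updating cr0.

module Example.QWhile
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
open Vale.PPC64LE.QuickCodes

// Placeholders standing in for a concrete loop: a body and its quick code,
// a comparison, a loop invariant, and a natural-number decreasing measure.
assume val body_code : code
assume val body : nat -> quickCode nat body_code
assume val cond : cmp
assume val inv : va_state -> nat -> Type0
assume val dec : va_state -> nat -> nat

// The ghost loop value starts at 0; Mod_cr0 must be in the footprint because
// evaluating the condition updates cr0 (see wp_While / wp_While_body above).
let example_loop : quickCode nat (While (cmp_to_ocmp cond) body_code) =
  va_qWhile [Mod_cr0] cond body inv dec 0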
Prims.Tot | val va_qInlineIf
(#a: Type)
(#c1 #c2: code)
(mods: mods_t)
(b: bool)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
: quickCode a (if_code b c1 c2) | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods) | val va_qInlineIf
(#a: Type)
(#c1 #c2: code)
(mods: mods_t)
(b: bool)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
: quickCode a (if_code b c1 c2)
let va_qInlineIf
(#a: Type)
(#c1 #c2: code)
(mods: mods_t)
(b: bool)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
: quickCode a (if_code b c1 c2) = | false | null | false | QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods) | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.code",
"Vale.PPC64LE.QuickCode.mods_t",
"Prims.bool",
"Vale.PPC64LE.QuickCode.quickCode",
"Vale.PPC64LE.QuickCode.QProc",
"Vale.PPC64LE.QuickCodes.if_code",
"Vale.PPC64LE.QuickCodes.wp_InlineIf",
"Vale.PPC64LE.QuickCodes.qInlineIf_proof"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr] | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_qInlineIf
(#a: Type)
(#c1 #c2: code)
(mods: mods_t)
(b: bool)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
: quickCode a (if_code b c1 c2) | [] | Vale.PPC64LE.QuickCodes.va_qInlineIf | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
mods: Vale.PPC64LE.QuickCode.mods_t ->
b: Prims.bool ->
qc1: Vale.PPC64LE.QuickCode.quickCode a c1 ->
qc2: Vale.PPC64LE.QuickCode.quickCode a c2
-> Vale.PPC64LE.QuickCode.quickCode a (Vale.PPC64LE.QuickCodes.if_code b c1 c2) | {
"end_col": 92,
"end_line": 202,
"start_col": 2,
"start_line": 202
} |
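A shape-only sketch of `va_qInlineIf` from the record above: the boolean is resolved at verification time, so the resulting code index is literally `if_code b c1 c2`. The `assume val` placeholders and the Example.QInlineIf name are illustrative, not part of the dataset.

module Example.QInlineIf
open Vale.PPC64LE.QuickCode
open Vale.PPC64LE.QuickCodes

// Placeholders for two alternative code blocks, their quick codes, and a footprint.
assume val c1 : code
assume val c2 : code
assume val qc1 : quickCode unit c1
assume val qc2 : quickCode unit c2
assume val fp : mods_t

// The inline-if selects between qc1 and qc2 at verification time; for a given b
// its code index if_code b c1 c2 is already resolved to one branch.
let example_inline_if (b:bool) : quickCode unit (if_code b c1 c2) =
  va_qInlineIf fp b qc1 qc2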