Columns in this dump, with type and observed size/class statistics:

| Column | Type | Notes |
|---|---|---|
| file_name | string | lengths 5–52 |
| name | string | lengths 4–95 |
| original_source_type | string | lengths 0–23k |
| source_type | string | lengths 9–23k |
| source_definition | string | lengths 9–57.9k |
| source | dict | |
| source_range | dict | |
| file_context | string | lengths 0–721k |
| dependencies | dict | |
| opens_and_abbrevs | list | lengths 2–94 |
| vconfig | dict | |
| interleaved | bool | 1 class |
| verbose_type | string | lengths 1–7.42k |
| effect | string | 118 distinct values |
| effect_flags | sequence | lengths 0–2 |
| mutual_with | sequence | lengths 0–11 |
| ideal_premises | sequence | lengths 0–236 |
| proof_features | sequence | lengths 0–1 |
| is_simple_lemma | bool | 2 classes |
| is_div | bool | 2 classes |
| is_proof | bool | 2 classes |
| is_simply_typed | bool | 2 classes |
| is_type | bool | 2 classes |
| partial_definition | string | lengths 5–3.99k |
| completed_definiton | string | lengths 1–1.63M |
| isa_cross_project_example | bool | 1 class |
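The rows are easier to work with programmatically than to read inline. Below is a minimal sketch, assuming the dump is available locally as JSON Lines (one row object per line); the file name `fstar_decls.jsonl` and the choice of fields to print are illustrative assumptions, not part of the dataset.

```python
# Minimal sketch (not an official loader): read a local JSON Lines dump whose
# rows follow the schema above and inspect the fields used for definition
# completion. The path "fstar_decls.jsonl" is a placeholder assumption; point
# it at wherever this dump is actually stored.
import json

with open("fstar_decls.jsonl", "r", encoding="utf-8") as f:
    rows = [json.loads(line) for line in f if line.strip()]

for row in rows[:3]:
    # Each row pairs a truncated F* declaration with its completed body.
    print(row["file_name"], "::", row["name"], "|", row["effect"])
    print("  partial_definition :", row["partial_definition"])
    print("  completed_definiton:", row["completed_definiton"])  # field name spelled as in the schema
    print("  ideal_premises     :", row["ideal_premises"])
```

The raw example rows below follow the column order of the table above, with `|` separating consecutive fields.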
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_state | val va_state : Type | let va_state = state | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 27,
"end_line": 59,
"start_col": 7,
"start_line": 59
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.State.state"
] | [] | false | false | false | true | true | let va_state =
| state | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.vec_opr | val vec_opr : Type0 | let vec_opr = vec | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 24,
"end_line": 64,
"start_col": 7,
"start_line": 64
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type0 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.vec"
] | [] | false | false | false | true | true | let vec_opr =
| vec | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_operand_reg_opr | val va_operand_reg_opr : Type0 | let va_operand_reg_opr = reg | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 35,
"end_line": 62,
"start_col": 7,
"start_line": 62
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0 | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type0 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.reg"
] | [] | false | false | false | true | true | let va_operand_reg_opr =
| reg | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_code | val va_code : Type0 | let va_code = precode ins ocmp | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 37,
"end_line": 56,
"start_col": 7,
"start_line": 56
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0 | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type0 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.precode",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.ocmp"
] | [] | false | false | false | true | true | let va_code =
| precode ins ocmp | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.reg_opr | val reg_opr : Type0 | let reg_opr = reg | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 24,
"end_line": 61,
"start_col": 7,
"start_line": 61
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type0 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.reg"
] | [] | false | false | false | true | true | let reg_opr =
| reg | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_operand_vec_opr | val va_operand_vec_opr : Type0 | let va_operand_vec_opr = vec | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 35,
"end_line": 65,
"start_col": 7,
"start_line": 65
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type0 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.vec"
] | [] | false | false | false | true | true | let va_operand_vec_opr =
| vec | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_operand_heaplet | val va_operand_heaplet : Type0 | let va_operand_heaplet = heaplet_id | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 42,
"end_line": 66,
"start_col": 7,
"start_line": 66
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type0 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.heaplet_id"
] | [] | false | false | false | true | true | let va_operand_heaplet =
| heaplet_id | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.buffer_readable | val buffer_readable (#t: M.base_typ) (h: vale_heap) (b: M.buffer t) : GTot prop0 | val buffer_readable (#t: M.base_typ) (h: vale_heap) (b: M.buffer t) : GTot prop0 | let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 111,
"end_line": 80,
"start_col": 7,
"start_line": 80
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer t
-> Prims.GTot Vale.Def.Prop_s.prop0 | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer",
"Vale.PPC64LE.Memory.buffer_readable",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | false | false | let buffer_readable (#t: M.base_typ) (h: vale_heap) (b: M.buffer t) : GTot prop0 =
| M.buffer_readable #t h b | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.from_heap_impl | val from_heap_impl (heap: heap_impl) : vale_full_heap | val from_heap_impl (heap: heap_impl) : vale_full_heap | let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 73,
"end_line": 24,
"start_col": 7,
"start_line": 24
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | heap: Vale.Arch.Heap.heap_impl -> Vale.Arch.HeapImpl.vale_full_heap | Prims.Tot | [
"total"
] | [] | [
"Vale.Arch.Heap.heap_impl",
"Vale.PPC64LE.Decls.coerce",
"Vale.Arch.HeapImpl.vale_full_heap"
] | [] | false | false | false | true | false | let from_heap_impl (heap: heap_impl) : vale_full_heap =
| coerce heap | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_expand_state | val va_expand_state (s: state) : state | val va_expand_state (s: state) : state | let va_expand_state (s:state) : state = s | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 60,
"end_line": 78,
"start_col": 19,
"start_line": 78
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.State.state -> Vale.PPC64LE.State.state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.State.state"
] | [] | false | false | false | true | false | let va_expand_state (s: state) : state =
| s | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_mul_nat | val va_mul_nat (x y: nat) : nat | val va_mul_nat (x y: nat) : nat | let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 7,
"end_line": 76,
"start_col": 19,
"start_line": 74
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Prims.nat -> y: Prims.nat -> Prims.nat | Prims.Tot | [
"total"
] | [] | [
"Prims.nat",
"FStar.Mul.op_Star",
"Prims.unit",
"Vale.PPC64LE.Decls.mul_nat_helper"
] | [] | false | false | false | true | false | let va_mul_nat (x y: nat) : nat =
| mul_nat_helper x y;
x * y | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.buffer_writeable | val buffer_writeable (#t: M.base_typ) (b: M.buffer t) : GTot prop0 | val buffer_writeable (#t: M.base_typ) (b: M.buffer t) : GTot prop0 | let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 97,
"end_line": 81,
"start_col": 7,
"start_line": 81
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: Vale.PPC64LE.Memory.buffer t -> Prims.GTot Vale.Def.Prop_s.prop0 | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.PPC64LE.Memory.buffer",
"Vale.PPC64LE.Memory.buffer_writeable",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | false | false | let buffer_writeable (#t: M.base_typ) (b: M.buffer t) : GTot prop0 =
| M.buffer_writeable #t b | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.coerce | val coerce (#b #a: Type) (x: a{a == b}) : b | val coerce (#b #a: Type) (x: a{a == b}) : b | let coerce (#b #a:Type) (x:a{a == b}) : b = x | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 52,
"end_line": 22,
"start_col": 7,
"start_line": 22
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: a{a == b} -> b | Prims.Tot | [
"total"
] | [] | [
"Prims.eq2"
] | [] | false | false | false | false | false | let coerce (#b #a: Type) (x: a{a == b}) : b =
| x | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.buffer8_as_seq | val buffer8_as_seq (m: vale_heap) (b: M.buffer8) : GTot (Seq.seq nat8) | val buffer8_as_seq (m: vale_heap) (b: M.buffer8) : GTot (Seq.seq nat8) | let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 97,
"end_line": 83,
"start_col": 7,
"start_line": 83
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer8
-> Prims.GTot (FStar.Seq.Base.seq Vale.PPC64LE.Machine_s.nat8) | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer8",
"Vale.PPC64LE.Memory.buffer_as_seq",
"Vale.PPC64LE.Memory.vuint8",
"FStar.Seq.Base.seq",
"Vale.PPC64LE.Machine_s.nat8"
] | [] | false | false | false | false | false | let buffer8_as_seq (m: vale_heap) (b: M.buffer8) : GTot (Seq.seq nat8) =
| M.buffer_as_seq m b | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.s64 | val s64 (m: vale_heap) (b: M.buffer64) : GTot (Seq.seq nat64) | val s64 (m: vale_heap) (b: M.buffer64) : GTot (Seq.seq nat64) | let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 88,
"end_line": 85,
"start_col": 7,
"start_line": 85
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer64
-> Prims.GTot (FStar.Seq.Base.seq Vale.PPC64LE.Machine_s.nat64) | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer64",
"Vale.PPC64LE.Decls.buffer64_as_seq",
"FStar.Seq.Base.seq",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | false | false | false | false | false | let s64 (m: vale_heap) (b: M.buffer64) : GTot (Seq.seq nat64) =
| buffer64_as_seq m b | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.buffer128_as_seq | val buffer128_as_seq (m: vale_heap) (b: M.buffer128) : GTot (Seq.seq quad32) | val buffer128_as_seq (m: vale_heap) (b: M.buffer128) : GTot (Seq.seq quad32) | let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 103,
"end_line": 86,
"start_col": 7,
"start_line": 86
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer128
-> Prims.GTot (FStar.Seq.Base.seq Vale.PPC64LE.Machine_s.quad32) | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer128",
"Vale.PPC64LE.Memory.buffer_as_seq",
"Vale.PPC64LE.Memory.vuint128",
"FStar.Seq.Base.seq",
"Vale.PPC64LE.Machine_s.quad32"
] | [] | false | false | false | false | false | let buffer128_as_seq (m: vale_heap) (b: M.buffer128) : GTot (Seq.seq quad32) =
| M.buffer_as_seq m b | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.buffer64_as_seq | val buffer64_as_seq (m: vale_heap) (b: M.buffer64) : GTot (Seq.seq nat64) | val buffer64_as_seq (m: vale_heap) (b: M.buffer64) : GTot (Seq.seq nat64) | let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 100,
"end_line": 84,
"start_col": 7,
"start_line": 84
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer64
-> Prims.GTot (FStar.Seq.Base.seq Vale.PPC64LE.Machine_s.nat64) | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer64",
"Vale.PPC64LE.Memory.buffer_as_seq",
"Vale.PPC64LE.Memory.vuint64",
"FStar.Seq.Base.seq",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | false | false | false | false | false | let buffer64_as_seq (m: vale_heap) (b: M.buffer64) : GTot (Seq.seq nat64) =
| M.buffer_as_seq m b | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.buffer64_read | val buffer64_read (b: M.buffer64) (i: int) (h: vale_heap) : GTot nat64 | val buffer64_read (b: M.buffer64) (i: int) (h: vale_heap) : GTot nat64 | let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 96,
"end_line": 90,
"start_col": 7,
"start_line": 90
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: Vale.PPC64LE.Memory.buffer64 -> i: Prims.int -> h: Vale.PPC64LE.Decls.vale_heap
-> Prims.GTot Vale.PPC64LE.Machine_s.nat64 | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.PPC64LE.Memory.buffer64",
"Prims.int",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer_read",
"Vale.PPC64LE.Memory.vuint64",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | false | false | false | false | false | let buffer64_read (b: M.buffer64) (i: int) (h: vale_heap) : GTot nat64 =
| M.buffer_read b i h | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.valid_src_addr | val valid_src_addr (#t: M.base_typ) (m: vale_heap) (b: M.buffer t) (i: int) : prop0 | val valid_src_addr (#t: M.base_typ) (m: vale_heap) (b: M.buffer t) (i: int) : prop0 | let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 114,
"end_line": 88,
"start_col": 7,
"start_line": 88
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer t -> i: Prims.int
-> Vale.Def.Prop_s.prop0 | Prims.Tot | [
"total"
] | [] | [
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer",
"Prims.int",
"Vale.PPC64LE.Memory.valid_buffer_read",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | false | false | let valid_src_addr (#t: M.base_typ) (m: vale_heap) (b: M.buffer t) (i: int) : prop0 =
| M.valid_buffer_read m b i | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.modifies_mem | val modifies_mem (s: M.loc) (h1 h2: vale_heap) : GTot prop0 | val modifies_mem (s: M.loc) (h1 h2: vale_heap) : GTot prop0 | let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 85,
"end_line": 92,
"start_col": 7,
"start_line": 92
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.Memory.loc -> h1: Vale.PPC64LE.Decls.vale_heap -> h2: Vale.PPC64LE.Decls.vale_heap
-> Prims.GTot Vale.Def.Prop_s.prop0 | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.PPC64LE.Memory.loc",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.modifies",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | false | false | let modifies_mem (s: M.loc) (h1 h2: vale_heap) : GTot prop0 =
| M.modifies s h1 h2 | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.valid_addr_mem | val valid_addr_mem (r: reg) (n: int) (s: state) : prop0 | val valid_addr_mem (r: reg) (n: int) (s: state) : prop0 | let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 99,
"end_line": 96,
"start_col": 7,
"start_line": 96
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: Vale.PPC64LE.Machine_s.reg -> n: Prims.int -> s: Vale.PPC64LE.State.state
-> Vale.Def.Prop_s.prop0 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.reg",
"Prims.int",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.State.valid_mem",
"Vale.PPC64LE.Machine_s.Mkmaddr",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | true | false | let valid_addr_mem (r: reg) (n: int) (s: state) : prop0 =
| valid_mem ({ address = r; offset = n }) s | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.buffer_length | val buffer_length : b: Vale.PPC64LE.Memory.buffer t -> Prims.GTot Prims.nat | let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 78,
"end_line": 82,
"start_col": 7,
"start_line": 82
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: Vale.PPC64LE.Memory.buffer t -> Prims.GTot Prims.nat | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.PPC64LE.Memory.buffer",
"Vale.PPC64LE.Memory.buffer_length",
"Prims.nat"
] | [] | false | false | false | false | false | let buffer_length (#t: M.base_typ) (b: M.buffer t) =
| M.buffer_length #t b | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.s128 | val s128 (m: vale_heap) (b: M.buffer128) : GTot (Seq.seq quad32) | val s128 (m: vale_heap) (b: M.buffer128) : GTot (Seq.seq quad32) | let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 92,
"end_line": 87,
"start_col": 7,
"start_line": 87
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer128
-> Prims.GTot (FStar.Seq.Base.seq Vale.PPC64LE.Machine_s.quad32) | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer128",
"Vale.PPC64LE.Decls.buffer128_as_seq",
"FStar.Seq.Base.seq",
"Vale.PPC64LE.Machine_s.quad32"
] | [] | false | false | false | false | false | let s128 (m: vale_heap) (b: M.buffer128) : GTot (Seq.seq quad32) =
| buffer128_as_seq m b | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.loc_union | val loc_union : s1: Vale.PPC64LE.Memory.loc -> s2: Vale.PPC64LE.Memory.loc -> Prims.GTot Vale.PPC64LE.Memory.loc | let loc_union = M.loc_union | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 34,
"end_line": 95,
"start_col": 7,
"start_line": 95
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s1: Vale.PPC64LE.Memory.loc -> s2: Vale.PPC64LE.Memory.loc -> Prims.GTot Vale.PPC64LE.Memory.loc | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.PPC64LE.Memory.loc_union"
] | [] | false | false | false | false | false | let loc_union =
| M.loc_union | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.buffer128_read | val buffer128_read (b: M.buffer128) (i: int) (h: vale_heap) : GTot quad32 | val buffer128_read (b: M.buffer128) (i: int) (h: vale_heap) : GTot quad32 | let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 99,
"end_line": 91,
"start_col": 7,
"start_line": 91
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: Vale.PPC64LE.Memory.buffer128 -> i: Prims.int -> h: Vale.PPC64LE.Decls.vale_heap
-> Prims.GTot Vale.PPC64LE.Machine_s.quad32 | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.PPC64LE.Memory.buffer128",
"Prims.int",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer_read",
"Vale.PPC64LE.Memory.vuint128",
"Vale.PPC64LE.Machine_s.quad32"
] | [] | false | false | false | false | false | let buffer128_read (b: M.buffer128) (i: int) (h: vale_heap) : GTot quad32 =
| M.buffer_read b i h | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.loc_buffer | val loc_buffer : b: Vale.PPC64LE.Memory.buffer t -> Prims.GTot Vale.PPC64LE.Memory.loc | let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 71,
"end_line": 93,
"start_col": 7,
"start_line": 93
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: Vale.PPC64LE.Memory.buffer t -> Prims.GTot Vale.PPC64LE.Memory.loc | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.PPC64LE.Memory.buffer",
"Vale.PPC64LE.Memory.loc_buffer",
"Vale.PPC64LE.Memory.loc"
] | [] | false | false | false | false | false | let loc_buffer (#t: M.base_typ) (b: M.buffer t) =
| M.loc_buffer #t b | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.valid_buf_maddr64 | val valid_buf_maddr64
(addr: int)
(s_mem: vale_heap)
(layout: vale_heap_layout)
(b: M.buffer64)
(index: int)
(t: taint)
: prop0 | val valid_buf_maddr64
(addr: int)
(s_mem: vale_heap)
(layout: vale_heap_layout)
(b: M.buffer64)
(index: int)
(t: taint)
: prop0 | let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 43,
"end_line": 101,
"start_col": 0,
"start_line": 98
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
addr: Prims.int ->
s_mem: Vale.PPC64LE.Decls.vale_heap ->
layout: Vale.Arch.HeapImpl.vale_heap_layout ->
b: Vale.PPC64LE.Memory.buffer64 ->
index: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint
-> Vale.Def.Prop_s.prop0 | Prims.Tot | [
"total"
] | [] | [
"Prims.int",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Vale.PPC64LE.Memory.buffer64",
"Vale.Arch.HeapTypes_s.taint",
"Prims.l_and",
"Vale.PPC64LE.Decls.valid_src_addr",
"Vale.PPC64LE.Memory.vuint64",
"Vale.PPC64LE.Memory.valid_taint_buf64",
"Vale.Arch.HeapImpl.__proj__Mkvale_heap_layout__item__vl_taint",
"Prims.eq2",
"Prims.op_Addition",
"Vale.PPC64LE.Memory.buffer_addr",
"FStar.Mul.op_Star",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | true | false | let valid_buf_maddr64
(addr: int)
(s_mem: vale_heap)
(layout: vale_heap_layout)
(b: M.buffer64)
(index: int)
(t: taint)
: prop0 =
| valid_src_addr s_mem b index /\ M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index | false |
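Note: valid_buf_maddr64 above pins addr to the index-th 64-bit word of b (base address plus 8 * index) under taint t. A minimal F* sketch of a derived helper — the name valid_buf_base64 is hypothetical and not part of Vale.PPC64LE.Decls — specializes it to a buffer's first word:
// Hypothetical helper, for illustration only: addr is exactly the base
// address of b, readable with taint t in the given heap and layout.
let valid_buf_base64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout)
  (b:M.buffer64) (t:taint) : prop0 =
  valid_buf_maddr64 addr s_mem layout b 0 t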
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.valid_buf_maddr128 | val valid_buf_maddr128
(addr: int)
(s_mem: vale_heap)
(layout: vale_heap_layout)
(b: M.buffer128)
(index: int)
(t: taint)
: prop0 | val valid_buf_maddr128
(addr: int)
(s_mem: vale_heap)
(layout: vale_heap_layout)
(b: M.buffer128)
(index: int)
(t: taint)
: prop0 | let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 44,
"end_line": 106,
"start_col": 0,
"start_line": 103
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
addr: Prims.int ->
s_mem: Vale.PPC64LE.Decls.vale_heap ->
layout: Vale.Arch.HeapImpl.vale_heap_layout ->
b: Vale.PPC64LE.Memory.buffer128 ->
index: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint
-> Vale.Def.Prop_s.prop0 | Prims.Tot | [
"total"
] | [] | [
"Prims.int",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Vale.PPC64LE.Memory.buffer128",
"Vale.Arch.HeapTypes_s.taint",
"Prims.l_and",
"Vale.PPC64LE.Decls.valid_src_addr",
"Vale.PPC64LE.Memory.vuint128",
"Vale.PPC64LE.Memory.valid_taint_buf128",
"Vale.Arch.HeapImpl.__proj__Mkvale_heap_layout__item__vl_taint",
"Prims.eq2",
"Prims.op_Addition",
"Vale.PPC64LE.Memory.buffer_addr",
"FStar.Mul.op_Star",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | true | false | let valid_buf_maddr128
(addr: int)
(s_mem: vale_heap)
(layout: vale_heap_layout)
(b: M.buffer128)
(index: int)
(t: taint)
: prop0 =
| valid_src_addr s_mem b index /\ M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.valid_mem_operand128 | val valid_mem_operand128 (addr: int) (t: taint) (s_mem: vale_heap) (layout: vale_heap_layout)
: prop0 | val valid_mem_operand128 (addr: int) (t: taint) (s_mem: vale_heap) (layout: vale_heap_layout)
: prop0 | let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 50,
"end_line": 114,
"start_col": 0,
"start_line": 112
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
addr: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint ->
s_mem: Vale.PPC64LE.Decls.vale_heap ->
layout: Vale.Arch.HeapImpl.vale_heap_layout
-> Vale.Def.Prop_s.prop0 | Prims.Tot | [
"total"
] | [] | [
"Prims.int",
"Vale.Arch.HeapTypes_s.taint",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Prims.l_Exists",
"Vale.PPC64LE.Memory.buffer128",
"Vale.PPC64LE.Decls.valid_buf_maddr128",
"Vale.PPC64LE.Memory.valid_buffer_read",
"Vale.PPC64LE.Memory.vuint128",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | true | false | let valid_mem_operand128 (addr: int) (t: taint) (s_mem: vale_heap) (layout: vale_heap_layout)
: prop0 =
| exists (b: M.buffer128) (index: int). {:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_tl | val va_tl (cs: va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) | val va_tl (cs: va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) | let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 112,
"end_line": 58,
"start_col": 0,
"start_line": 58
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | cs: Vale.PPC64LE.Decls.va_codes -> Prims.Ghost Vale.PPC64LE.Decls.va_codes | Prims.Ghost | [] | [] | [
"Vale.PPC64LE.Decls.va_codes",
"Prims.__proj__Cons__item__tl",
"Vale.PPC64LE.Decls.va_code",
"Prims.b2t",
"Prims.uu___is_Cons",
"Prims.eq2",
"Prims.list"
] | [] | false | false | false | false | false | let va_tl (cs: va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) =
| Cons?.tl cs | false |
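Note: since va_tl above carries the Ghost effect with a Cons? precondition, callers pair it with va_hd under that same precondition. A minimal sketch (the name va_peel is hypothetical, not part of the Vale interface, and the trivial proof may need hints under a low rlimit):
// Hypothetical illustration: split a non-empty code list into head and tail.
// Ghost because va_tl is Ghost; Cons? cs discharges both preconditions.
let va_peel (cs:va_codes) : Ghost (va_code & va_codes)
  (requires Cons? cs)
  (ensures fun r -> snd r == Cons?.tl cs)
  =
  (va_hd cs, va_tl cs)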
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.valid_mem_operand64 | val valid_mem_operand64 (addr: int) (t: taint) (s_mem: vale_heap) (layout: vale_heap_layout) : prop0 | val valid_mem_operand64 (addr: int) (t: taint) (s_mem: vale_heap) (layout: vale_heap_layout) : prop0 | let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 49,
"end_line": 110,
"start_col": 0,
"start_line": 108
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
addr: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint ->
s_mem: Vale.PPC64LE.Decls.vale_heap ->
layout: Vale.Arch.HeapImpl.vale_heap_layout
-> Vale.Def.Prop_s.prop0 | Prims.Tot | [
"total"
] | [] | [
"Prims.int",
"Vale.Arch.HeapTypes_s.taint",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Prims.l_Exists",
"Vale.PPC64LE.Memory.buffer64",
"Vale.PPC64LE.Decls.valid_buf_maddr64",
"Vale.PPC64LE.Memory.valid_buffer_read",
"Vale.PPC64LE.Memory.vuint64",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | true | false | let valid_mem_operand64 (addr: int) (t: taint) (s_mem: vale_heap) (layout: vale_heap_layout) : prop0 =
| exists (b: M.buffer64) (index: int). {:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t | false |
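Note: valid_mem_operand64 above and valid_mem_operand128 earlier both existentially quantify over a buffer and an index for addr. A width-agnostic wrapper is just their disjunction; a sketch with a hypothetical name (not part of Vale.PPC64LE.Decls):
// Hypothetical combinator, for illustration only: addr is a valid 64-bit or
// 128-bit memory operand with taint t in the given heap and layout.
let valid_mem_operand_any (addr:int) (t:taint) (s_mem:vale_heap)
  (layout:vale_heap_layout) : prop0 =
  valid_mem_operand64 addr t s_mem layout \/ valid_mem_operand128 addr t s_mem layout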
Vale.AES.X64.GCMencryptOpt.fsti | Vale.AES.X64.GCMencryptOpt.va_wp_Gctr_register | val va_wp_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | val va_wp_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (()))) | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 58,
"end_line": 91,
"start_col": 0,
"start_line": 78
} | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))))) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | Prims.Tot | [
"total"
] | [] | [
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.quad32",
"Vale.X64.Memory.buffer128",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.eq2",
"Vale.Def.Words_s.four",
"Vale.Def.Types_s.nat32",
"Vale.X64.Decls.va_get_xmm",
"Vale.Def.Words_s.Mkfour",
"Vale.AES.X64.GCMencryptOpt.aes_reqs",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Decls.va_get_mem_heaplet",
"Vale.X64.Decls.va_get_mem_layout",
"Prims.l_Forall",
"Vale.X64.Flags.t",
"Vale.X64.Memory.nat64",
"Prims.l_imp",
"Vale.Def.Types_s.nat8",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"FStar.Seq.Base.create",
"Vale.AES.GCTR_s.gctr_encrypt_LE",
"Vale.Def.Types_s.le_quad32_to_bytes",
"Vale.Def.Types_s.reverse_bytes_quad32",
"Vale.Def.Types_s.quad32",
"Vale.AES.GCTR_s.gctr_encrypt_block",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_xmm"
] | [] | false | false | false | true | true | let va_wp_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 =
| (va_get_ok va_s0 /\
(sse_enabled /\
va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
aes_reqs alg
key
round_keys
keys_b
(va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0)
(va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0: quad32)
(va_x_xmm1: quad32)
(va_x_xmm2: quad32)
(va_x_xmm8: quad32)
(va_x_efl: Vale.X64.Flags.t)
(va_x_r12: nat64).
let va_sM =
va_upd_reg64 rR12
va_x_r12
(va_upd_flags va_x_efl
(va_upd_xmm 8
va_x_xmm8
(va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 va_s0)))))
in
va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM)
) ==
Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0)
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8
va_s0)))
alg
key /\
va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))
alg
key
0) ==>
va_k va_sM (()))) | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.locs_disjoint | val locs_disjoint : ls: Prims.list Vale.PPC64LE.Memory.loc -> Vale.Def.Prop_s.prop0 | let locs_disjoint = M.locs_disjoint | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 42,
"end_line": 94,
"start_col": 7,
"start_line": 94
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2 | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ls: Prims.list Vale.PPC64LE.Memory.loc -> Vale.Def.Prop_s.prop0 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Memory.locs_disjoint"
] | [] | false | false | false | true | false | let locs_disjoint =
| M.locs_disjoint | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.valid_stack128 | val valid_stack128 (m: maddr) (t: taint) (s: state) : prop0 | val valid_stack128 (m: maddr) (t: taint) (s: state) : prop0 | let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 60,
"end_line": 128,
"start_col": 0,
"start_line": 127
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Vale.PPC64LE.Machine_s.maddr -> t: Vale.Arch.HeapTypes_s.taint -> s: Vale.PPC64LE.State.state
-> Vale.Def.Prop_s.prop0 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.maddr",
"Vale.Arch.HeapTypes_s.taint",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Stack_i.valid_taint_stack128",
"Vale.PPC64LE.State.eval_maddr",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | true | false | let valid_stack128 (m: maddr) (t: taint) (s: state) : prop0 =
| SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.valid_stack | val valid_stack (m: maddr) (t: taint) (s: state) : prop0 | val valid_stack (m: maddr) (t: taint) (s: state) : prop0 | let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 59,
"end_line": 124,
"start_col": 0,
"start_line": 123
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Vale.PPC64LE.Machine_s.maddr -> t: Vale.Arch.HeapTypes_s.taint -> s: Vale.PPC64LE.State.state
-> Vale.Def.Prop_s.prop0 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.maddr",
"Vale.Arch.HeapTypes_s.taint",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Stack_i.valid_taint_stack64",
"Vale.PPC64LE.State.eval_maddr",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | true | false | let valid_stack (m: maddr) (t: taint) (s: state) : prop0 =
| SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint | false |
Vale.AES.X64.GCMencryptOpt.fsti | Vale.AES.X64.GCMencryptOpt.va_quick_Gcm_make_length_quad | val va_quick_Gcm_make_length_quad: Prims.unit
-> (va_quickCode unit (va_code_Gcm_make_length_quad ())) | val va_quick_Gcm_make_length_quad: Prims.unit
-> (va_quickCode unit (va_code_Gcm_make_length_quad ())) | let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad) | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 63,
"end_line": 230,
"start_col": 0,
"start_line": 228
} | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g)))) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | _: Prims.unit
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCMencryptOpt.va_code_Gcm_make_length_quad ()) | Prims.Tot | [
"total"
] | [] | [
"Prims.unit",
"Vale.X64.QuickCode.va_QProc",
"Vale.AES.X64.GCMencryptOpt.va_code_Gcm_make_length_quad",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rRax",
"Vale.X64.QuickCode.va_Mod_xmm",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Gcm_make_length_quad",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Gcm_make_length_quad",
"Vale.X64.QuickCode.va_quickCode"
] | [] | false | false | false | false | false | let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
| (va_QProc (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad
va_wpProof_Gcm_make_length_quad) | false |
Vale.AES.X64.GCMencryptOpt.fsti | Vale.AES.X64.GCMencryptOpt.aes_reqs | val aes_reqs
(alg: algorithm)
(key: seq nat32)
(round_keys: seq quad32)
(keys_b: buffer128)
(key_ptr: int)
(heap0: vale_heap)
(layout: vale_heap_layout)
: prop0 | val aes_reqs
(alg: algorithm)
(key: seq nat32)
(round_keys: seq quad32)
(keys_b: buffer128)
(key_ptr: int)
(heap0: vale_heap)
(layout: vale_heap_layout)
: prop0 | let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 33,
"end_line": 55,
"start_col": 0,
"start_line": 45
} | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128 ->
key_ptr: Prims.int ->
heap0: Vale.X64.InsBasic.vale_heap ->
layout: Vale.Arch.HeapImpl.vale_heap_layout
-> Vale.Def.Prop_s.prop0 | Prims.Tot | [
"total"
] | [] | [
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.quad32",
"Vale.X64.Memory.buffer128",
"Prims.int",
"Vale.X64.InsBasic.vale_heap",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.CPU_Features_s.aesni_enabled",
"Vale.X64.CPU_Features_s.avx_enabled",
"Prims.l_or",
"Prims.op_Equality",
"Vale.AES.AES_common_s.AES_128",
"Vale.AES.AES_common_s.AES_256",
"Vale.AES.AES_s.is_aes_key_LE",
"Prims.eq2",
"FStar.Seq.Base.length",
"Prims.op_Addition",
"Vale.AES.AES_common_s.nr",
"Vale.Def.Types_s.quad32",
"Vale.AES.AES_s.key_to_round_keys_LE",
"Vale.X64.Decls.validSrcAddrs128",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.s128",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | true | false | let aes_reqs
(alg: algorithm)
(key: seq nat32)
(round_keys: seq quad32)
(keys_b: buffer128)
(key_ptr: int)
(heap0: vale_heap)
(layout: vale_heap_layout)
: prop0 =
| aesni_enabled /\ avx_enabled /\ (alg = AES_128 \/ alg = AES_256) /\ is_aes_key_LE alg key /\
length (round_keys) == nr (alg) + 1 /\ round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\ s128 heap0 keys_b == round_keys | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.valid_dst_addr | val valid_dst_addr (#t: M.base_typ) (m: vale_heap) (b: M.buffer t) (i: int) : prop0 | val valid_dst_addr (#t: M.base_typ) (m: vale_heap) (b: M.buffer t) (i: int) : prop0 | let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 115,
"end_line": 89,
"start_col": 7,
"start_line": 89
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer t -> i: Prims.int
-> Vale.Def.Prop_s.prop0 | Prims.Tot | [
"total"
] | [] | [
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer",
"Prims.int",
"Vale.PPC64LE.Memory.valid_buffer_write",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | false | false | let valid_dst_addr (#t: M.base_typ) (m: vale_heap) (b: M.buffer t) (i: int) : prop0 =
| M.valid_buffer_write m b i | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_if | val va_if (#a: Type) (b: bool) (x: (_: unit{b} -> a)) (y: (_: unit{~b} -> a)) : a | val va_if (#a: Type) (b: bool) (x: (_: unit{b} -> a)) (y: (_: unit{~b} -> a)) : a | let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y () | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 26,
"end_line": 48,
"start_col": 0,
"start_line": 47
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]] | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: Prims.bool -> x: (_: Prims.unit{b} -> a) -> y: (_: Prims.unit{~b} -> a) -> a | Prims.Tot | [
"total"
] | [] | [
"Prims.bool",
"Prims.unit",
"Prims.b2t",
"Prims.l_not"
] | [] | false | false | false | false | false | let va_if (#a: Type) (b: bool) (x: (_: unit{b} -> a)) (y: (_: unit{~b} -> a)) : a =
| if b then x () else y () | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_op_reg_opr_reg | val va_op_reg_opr_reg (r: reg) : reg_opr | val va_op_reg_opr_reg (r: reg) : reg_opr | let va_op_reg_opr_reg (r:reg) : reg_opr = r | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 62,
"end_line": 132,
"start_col": 19,
"start_line": 132
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: Vale.PPC64LE.Machine_s.reg -> Vale.PPC64LE.Decls.reg_opr | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.reg",
"Vale.PPC64LE.Decls.reg_opr"
] | [] | false | false | false | true | false | let va_op_reg_opr_reg (r: reg) : reg_opr =
| r | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_op_heaplet_mem_heaplet | val va_op_heaplet_mem_heaplet (h: heaplet_id) : heaplet_id | val va_op_heaplet_mem_heaplet (h: heaplet_id) : heaplet_id | let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 80,
"end_line": 136,
"start_col": 19,
"start_line": 136
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: Vale.PPC64LE.Decls.heaplet_id -> Vale.PPC64LE.Decls.heaplet_id | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.heaplet_id"
] | [] | false | false | false | true | false | let va_op_heaplet_mem_heaplet (h: heaplet_id) : heaplet_id =
| h | false |
Vale.AES.X64.GCMencryptOpt.fsti | Vale.AES.X64.GCMencryptOpt.va_quick_Gctr_blocks128 | val va_quick_Gctr_blocks128
(alg: algorithm)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_blocks128 alg)) | val va_quick_Gctr_blocks128
(alg: algorithm)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_blocks128 alg)) | let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b)) | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 33,
"end_line": 192,
"start_col": 0,
"start_line": 186
} | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g)))) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
alg: Vale.AES.AES_common_s.algorithm ->
in_b: Vale.X64.Memory.buffer128 ->
out_b: Vale.X64.Memory.buffer128 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCMencryptOpt.va_code_Gctr_blocks128 alg) | Prims.Tot | [
"total"
] | [] | [
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Memory.buffer128",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.quad32",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCMencryptOpt.va_code_Gctr_blocks128",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_mem_heaplet",
"Vale.X64.QuickCode.va_Mod_xmm",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.QuickCode.va_Mod_mem",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Gctr_blocks128",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Gctr_blocks128",
"Vale.X64.QuickCode.va_quickCode"
] | [] | false | false | false | false | false | let va_quick_Gctr_blocks128
(alg: algorithm)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
| (va_QProc (va_code_Gctr_blocks128 alg)
([
va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5;
va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10;
va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem
])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b)
(va_wpProof_Gctr_blocks128 alg in_b out_b key round_keys keys_b)) | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_const_cmp | val va_const_cmp (n: imm16) : cmp_opr | val va_const_cmp (n: imm16) : cmp_opr | let va_const_cmp (n:imm16) : cmp_opr = CImm n | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 64,
"end_line": 135,
"start_col": 19,
"start_line": 135
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | n: Vale.PPC64LE.Machine_s.imm16 -> Vale.PPC64LE.Machine_s.cmp_opr | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.imm16",
"Vale.PPC64LE.Machine_s.CImm",
"Vale.PPC64LE.Machine_s.cmp_opr"
] | [] | false | false | false | true | false | let va_const_cmp (n: imm16) : cmp_opr =
| CImm n | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_op_cmp_reg | val va_op_cmp_reg (r: reg) : cmp_opr | val va_op_cmp_reg (r: reg) : cmp_opr | let va_op_cmp_reg (r:reg) : cmp_opr = CReg r | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 63,
"end_line": 134,
"start_col": 19,
"start_line": 134
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: Vale.PPC64LE.Machine_s.reg -> Vale.PPC64LE.Machine_s.cmp_opr | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.reg",
"Vale.PPC64LE.Machine_s.CReg",
"Vale.PPC64LE.Machine_s.cmp_opr"
] | [] | false | false | false | true | false | let va_op_cmp_reg (r: reg) : cmp_opr =
| CReg r | false |
Vale.AES.X64.GCMencryptOpt.fsti | Vale.AES.X64.GCMencryptOpt.va_quick_Gctr_register | val va_quick_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_register alg)) | val va_quick_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_register alg)) | let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b)) | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 57,
"end_line": 105,
"start_col": 0,
"start_line": 101
} | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g)))) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCMencryptOpt.va_code_Gctr_register alg) | Prims.Tot | [
"total"
] | [] | [
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.quad32",
"Vale.X64.Memory.buffer128",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCMencryptOpt.va_code_Gctr_register",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR12",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_xmm",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Gctr_register",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Gctr_register",
"Vale.X64.QuickCode.va_quickCode"
] | [] | false | false | false | false | false | let va_quick_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_register alg)) =
| (va_QProc (va_code_Gctr_register alg)
([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0])
(va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b)) | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_op_vec_opr_vec | val va_op_vec_opr_vec (v: vec) : vec_opr | val va_op_vec_opr_vec (v: vec) : vec_opr | let va_op_vec_opr_vec (v:vec) : vec_opr = v | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 62,
"end_line": 133,
"start_col": 19,
"start_line": 133
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | v: Vale.PPC64LE.Machine_s.vec -> Vale.PPC64LE.Decls.vec_opr | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.vec",
"Vale.PPC64LE.Decls.vec_opr"
] | [] | false | false | false | true | false | let va_op_vec_opr_vec (v: vec) : vec_opr =
| v | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_get_cr0 | val va_get_cr0 (s: va_state) : cr0_t | val va_get_cr0 (s: va_state) : cr0_t | let va_get_cr0 (s:va_state) : cr0_t = s.cr0 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 62,
"end_line": 144,
"start_col": 19,
"start_line": 144
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Machine_s.cr0_t | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__cr0",
"Vale.PPC64LE.Machine_s.cr0_t"
] | [] | false | false | false | true | false | let va_get_cr0 (s: va_state) : cr0_t =
| s.cr0 | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_get_ok | val va_get_ok (s: va_state) : bool | val va_get_ok (s: va_state) : bool | let va_get_ok (s:va_state) : bool = s.ok | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 59,
"end_line": 143,
"start_col": 19,
"start_line": 143
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t) | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.Decls.va_state -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ok",
"Prims.bool"
] | [] | false | false | false | true | false | let va_get_ok (s: va_state) : bool =
| s.ok | false |
Vale.AES.X64.GCMencryptOpt.fsti | Vale.AES.X64.GCMencryptOpt.va_wp_Gcm_make_length_quad | val va_wp_Gcm_make_length_quad (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 | val va_wp_Gcm_make_length_quad (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 | let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (()))) | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 53,
"end_line": 220,
"start_col": 0,
"start_line": 212
} | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | va_s0: Vale.X64.Decls.va_state -> va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | Prims.Tot | [
"total"
] | [] | [
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.op_LessThan",
"Prims.op_Multiply",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.pow2_64",
"Vale.X64.Machine_s.rR11",
"Prims.l_Forall",
"Vale.X64.Decls.quad32",
"Vale.X64.Memory.nat64",
"Vale.X64.Flags.t",
"Prims.l_imp",
"Prims.eq2",
"Vale.Def.Types_s.quad32",
"Vale.X64.Decls.va_get_xmm",
"Vale.Def.Types_s.insert_nat64",
"Vale.Def.Words_s.Mkfour",
"Vale.Def.Types_s.nat32",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rRax",
"Vale.X64.Decls.va_upd_xmm"
] | [] | false | false | false | true | true | let va_wp_Gcm_make_length_quad (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 =
| (va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` (va_get_reg64 rR13 va_s0) < pow2_64 /\
8 `op_Multiply` (va_get_reg64 rR11 va_s0) < pow2_64) /\
(forall (va_x_xmm0: quad32) (va_x_rax: nat64) (va_x_efl: Vale.X64.Flags.t).
let va_sM =
va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax (va_upd_xmm 0 va_x_xmm0 va_s0))
in
va_get_ok va_sM /\
(8 `op_Multiply` (va_get_reg64 rR13 va_s0) < pow2_64 /\
8 `op_Multiply` (va_get_reg64 rR11 va_s0) < pow2_64 /\
va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0)
(8 `op_Multiply` (va_get_reg64 rR11 va_s0))
1)
(8 `op_Multiply` (va_get_reg64 rR13 va_s0))
0) ==>
va_k va_sM (()))) | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_get_reg | val va_get_reg (r: reg) (s: va_state) : nat64 | val va_get_reg (r: reg) (s: va_state) : nat64 | let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 77,
"end_line": 146,
"start_col": 19,
"start_line": 146
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0 | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: Vale.PPC64LE.Machine_s.reg -> s: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Machine_s.nat64 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.reg",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.State.eval_reg",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | false | false | false | true | false | let va_get_reg (r: reg) (s: va_state) : nat64 =
| eval_reg r s | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_opr_code_Mem64 | val va_opr_code_Mem64 (h: heaplet_id) (r: reg) (n: int) (t: taint) : tmaddr | val va_opr_code_Mem64 (h: heaplet_id) (r: reg) (n: int) (t: taint) : tmaddr | let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t) | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 30,
"end_line": 140,
"start_col": 7,
"start_line": 139
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h: Vale.PPC64LE.Decls.heaplet_id ->
r: Vale.PPC64LE.Machine_s.reg ->
n: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint
-> Vale.PPC64LE.Machine_s.tmaddr | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.heaplet_id",
"Vale.PPC64LE.Machine_s.reg",
"Prims.int",
"Vale.Arch.HeapTypes_s.taint",
"FStar.Pervasives.Native.Mktuple2",
"Vale.PPC64LE.Machine_s.maddr",
"Vale.PPC64LE.Machine_s.Mkmaddr",
"Vale.PPC64LE.Machine_s.tmaddr"
] | [] | false | false | false | true | false | let va_opr_code_Mem64 (h: heaplet_id) (r: reg) (n: int) (t: taint) : tmaddr =
| ({ address = r; offset = n }, t) | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_get_vec | val va_get_vec (x: vec) (s: va_state) : quad32 | val va_get_vec (x: vec) (s: va_state) : quad32 | let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 78,
"end_line": 147,
"start_col": 19,
"start_line": 147
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Vale.PPC64LE.Machine_s.vec -> s: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Machine_s.quad32 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.vec",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.State.eval_vec",
"Vale.PPC64LE.Machine_s.quad32"
] | [] | false | false | false | true | false | let va_get_vec (x: vec) (s: va_state) : quad32 =
| eval_vec x s | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_get_stack | val va_get_stack (s: va_state) : SI.vale_stack | val va_get_stack (s: va_state) : SI.vale_stack | let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 94,
"end_line": 151,
"start_col": 19,
"start_line": 151
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Stack_i.vale_stack | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Stack_Sems.stack_from_s",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stack",
"Vale.PPC64LE.Stack_i.vale_stack"
] | [] | false | false | false | true | false | let va_get_stack (s: va_state) : SI.vale_stack =
| VSS.stack_from_s s.ms_stack | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_get_xer | val va_get_xer (s: va_state) : xer_t | val va_get_xer (s: va_state) : xer_t | let va_get_xer (s:va_state) : xer_t = s.xer | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 62,
"end_line": 145,
"start_col": 19,
"start_line": 145
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Machine_s.xer_t | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__xer",
"Vale.PPC64LE.Machine_s.xer_t"
] | [] | false | false | false | true | false | let va_get_xer (s: va_state) : xer_t =
| s.xer | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_get_stackTaint | val va_get_stackTaint (s: va_state) : M.memtaint | val va_get_stackTaint (s: va_state) : M.memtaint | let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 84,
"end_line": 152,
"start_col": 19,
"start_line": 152
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Memory.memtaint | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint",
"Vale.PPC64LE.Memory.memtaint"
] | [] | false | false | false | true | false | let va_get_stackTaint (s: va_state) : M.memtaint =
| s.ms_stackTaint | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_eval_reg | val va_eval_reg (s: va_state) (r: reg) : GTot nat64 | val va_eval_reg (s: va_state) (r: reg) : GTot nat64 | let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 93,
"end_line": 155,
"start_col": 19,
"start_line": 155
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.Decls.va_state -> r: Vale.PPC64LE.Machine_s.reg
-> Prims.GTot Vale.PPC64LE.Machine_s.nat64 | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.reg",
"Vale.PPC64LE.State.eval_reg",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | false | false | false | false | false | let va_eval_reg (s: va_state) (r: reg) : GTot nat64 =
| eval_reg r s | false |
Vale.AES.X64.GCMencryptOpt.fsti | Vale.AES.X64.GCMencryptOpt.va_wp_Ghash_extra_bytes | val va_wp_Ghash_extra_bytes
(hkeys_b: buffer128)
(total_bytes: nat)
(old_hash h_LE: quad32)
(completed_quads: (seq quad32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | val va_wp_Ghash_extra_bytes
(hkeys_b: buffer128)
(total_bytes: nat)
(old_hash h_LE: quad32)
(completed_quads: (seq quad32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (()))) | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 53,
"end_line": 294,
"start_col": 0,
"start_line": 267
} | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0))))))))))))))) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
hkeys_b: Vale.X64.Memory.buffer128 ->
total_bytes: Prims.nat ->
old_hash: Vale.X64.Decls.quad32 ->
h_LE: Vale.X64.Decls.quad32 ->
completed_quads: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | Prims.Tot | [
"total"
] | [] | [
"Vale.X64.Memory.buffer128",
"Prims.nat",
"Vale.X64.Decls.quad32",
"FStar.Seq.Base.seq",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.pclmulqdq_enabled",
"Vale.X64.CPU_Features_s.avx_enabled",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.eq2",
"Vale.Def.Words_s.four",
"Vale.Def.Types_s.nat32",
"Vale.X64.Decls.va_get_xmm",
"Vale.Def.Words_s.Mkfour",
"Vale.Def.Types_s.quad32",
"Vale.Def.Types_s.reverse_bytes_quad32",
"Vale.AES.GHash.ghash_incremental0",
"Vale.AES.GHash.hkeys_reqs_priv",
"Vale.X64.Decls.s128",
"Vale.X64.Decls.va_get_mem_heaplet",
"Vale.X64.Decls.validSrcAddrs128",
"Prims.op_Subtraction",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.Arch.HeapTypes_s.Secret",
"Prims.int",
"FStar.Seq.Base.length",
"Prims.op_Division",
"Prims.op_LessThan",
"Prims.op_Addition",
"Prims.op_Multiply",
"Vale.X64.Machine_s.rR10",
"Prims.op_Modulus",
"Prims.l_not",
"Vale.AES.GCM_helpers.bytes_to_quad_size",
"Prims.l_Forall",
"Vale.X64.Memory.nat64",
"Vale.X64.Flags.t",
"Prims.l_imp",
"Prims.op_GreaterThan",
"Vale.AES.GHash.ghash_incremental",
"Vale.Def.Types_s.le_bytes_to_seq_quad32",
"Vale.Def.Words_s.nat8",
"Vale.AES.GCTR_s.pad_to_128_bits",
"FStar.Seq.Base.slice",
"Vale.Def.Types_s.nat8",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"FStar.Seq.Base.append",
"FStar.Seq.Base.create",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_xmm",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rRcx"
] | [] | false | false | false | true | true | let va_wp_Ghash_extra_bytes
(hkeys_b: buffer128)
(total_bytes: nat)
(old_hash h_LE: quad32)
(completed_quads: (seq quad32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 =
| (va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\
va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 ==
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
old_hash
completed_quads) /\
Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR9 va_s0 - 32)
hkeys_b
8
(va_get_mem_layout va_s0)
Secret /\ FStar.Seq.Base.length #quad32 completed_quads == total_bytes `op_Division` 16 /\
total_bytes < 16 `op_Multiply` (FStar.Seq.Base.length #quad32 completed_quads) + 16 /\
va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes `op_Modulus` 16 =!= 0 /\
(0 < total_bytes /\
total_bytes < 16 `op_Multiply` (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes)) /\
16 `op_Multiply` (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\
(forall (va_x_rcx: nat64) (va_x_r11: nat64) (va_x_xmm0: quad32) (va_x_xmm1: quad32)
(va_x_xmm2: quad32) (va_x_xmm3: quad32) (va_x_xmm4: quad32) (va_x_xmm5: quad32)
(va_x_xmm6: quad32) (va_x_xmm7: quad32) (va_x_xmm8: quad32) (va_x_efl: Vale.X64.Flags.t).
let va_sM =
va_upd_flags va_x_efl
(va_upd_xmm 8
va_x_xmm8
(va_upd_xmm 7
va_x_xmm7
(va_upd_xmm 6
va_x_xmm6
(va_upd_xmm 5
va_x_xmm5
(va_upd_xmm 4
va_x_xmm4
(va_upd_xmm 3
va_x_xmm3
(va_upd_xmm 2
va_x_xmm2
(va_upd_xmm 1
va_x_xmm1
(va_upd_xmm 0
va_x_xmm0
(va_upd_reg64 rR11
va_x_r11
(va_upd_reg64 rRcx va_x_rcx va_s0)))))))))))
in
va_get_ok va_sM /\
(let raw_quads =
FStar.Seq.Base.append #quad32
completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0))
in
let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0
total_bytes
in
let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in
let input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in
total_bytes > 0 ==>
l_and (FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) ==
Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads)) ==>
va_k va_sM (()))) | false |
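For reference: the two inequalities in the precondition of va_wp_Ghash_extra_bytes above, 16 `op_Multiply` (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes and total_bytes < 16 `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes, pin bytes_to_quad_size total_bytes down as the ceiling of total_bytes / 16. A minimal standalone F* sketch of that ceiling computation follows; the module and function names are illustrative only and are not taken from the Vale sources shown in this record, and the sketch is assumed (not verified here) to agree with Vale.AES.GCM_helpers.bytes_to_quad_size.
module BytesToQuadSizeSketch
// Ceiling division by 16: for num_bytes >= 0 this returns the least q with num_bytes <= 16 * q.
let bytes_to_quad_size_sketch (num_bytes:nat) : int = (num_bytes + 15) / 16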
Vale.AES.X64.GCMencryptOpt.fsti | Vale.AES.X64.GCMencryptOpt.va_quick_Ghash_extra_bytes | val va_quick_Ghash_extra_bytes
(hkeys_b: buffer128)
(total_bytes: nat)
(old_hash h_LE: quad32)
(completed_quads: (seq quad32))
: (va_quickCode unit (va_code_Ghash_extra_bytes ())) | val va_quick_Ghash_extra_bytes
(hkeys_b: buffer128)
(total_bytes: nat)
(old_hash h_LE: quad32)
(completed_quads: (seq quad32))
: (va_quickCode unit (va_code_Ghash_extra_bytes ())) | let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads)) | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 21,
"end_line": 313,
"start_col": 0,
"start_line": 306
} | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g)))) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
hkeys_b: Vale.X64.Memory.buffer128 ->
total_bytes: Prims.nat ->
old_hash: Vale.X64.Decls.quad32 ->
h_LE: Vale.X64.Decls.quad32 ->
completed_quads: FStar.Seq.Base.seq Vale.X64.Decls.quad32
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCMencryptOpt.va_code_Ghash_extra_bytes ()) | Prims.Tot | [
"total"
] | [] | [
"Vale.X64.Memory.buffer128",
"Prims.nat",
"Vale.X64.Decls.quad32",
"FStar.Seq.Base.seq",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCMencryptOpt.va_code_Ghash_extra_bytes",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_xmm",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rRcx",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Ghash_extra_bytes",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Ghash_extra_bytes",
"Vale.X64.QuickCode.va_quickCode"
] | [] | false | false | false | false | false | let va_quick_Ghash_extra_bytes
(hkeys_b: buffer128)
(total_bytes: nat)
(old_hash h_LE: quad32)
(completed_quads: (seq quad32))
: (va_quickCode unit (va_code_Ghash_extra_bytes ())) =
| (va_QProc (va_code_Ghash_extra_bytes ())
([
va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx
])
(va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads)
(va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads)) | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_eval_reg_opr | val va_eval_reg_opr (s: va_state) (r: reg_opr) : GTot nat64 | val va_eval_reg_opr (s: va_state) (r: reg_opr) : GTot nat64 | let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 98,
"end_line": 157,
"start_col": 19,
"start_line": 157
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
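// Reading of the definition: addr names the index-th 64-bit word of buffer b,
// so the read must be in bounds, the buffer's taint recorded in layout must
// match t, and addr must equal the buffer's base address plus 8 * index bytes
// (illustratively, index 2 of a buffer based at address a needs addr == a + 16).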
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
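// The existential above says some 64-bit buffer and index account for addr;
// the {:pattern (M.valid_buffer_read s_mem b index)} annotation is the SMT
// trigger used when this quantifier is encoded to Z3.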
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
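// In words: a tmaddr (an maddr paired with its taint) is a valid 64-bit memory
// operand when the maddr is well-formed in s and the address it evaluates to
// lands inside some 64-bit buffer of the current vale_heap with the matching
// taint, as spelled out by valid_mem_operand64 above.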
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
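// Example (illustrative): va_opr_code_Mem64 h r 16 Secret is just
// ({ address = r; offset = 16 }, Secret). The heaplet id h does not appear in
// the resulting tmaddr; it is only consumed by the Vale-level proofs and does
// not affect the operand built here.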
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.Decls.va_state -> r: Vale.PPC64LE.Decls.reg_opr
-> Prims.GTot Vale.PPC64LE.Machine_s.nat64 | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.reg_opr",
"Vale.PPC64LE.State.eval_reg",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | false | false | false | false | false | let va_eval_reg_opr (s: va_state) (r: reg_opr) : GTot nat64 =
| eval_reg r s | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_eval_heaplet | val va_eval_heaplet (s: va_state) (h: heaplet_id) : vale_heap | val va_eval_heaplet (s: va_state) (h: heaplet_id) : vale_heap | let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 103,
"end_line": 160,
"start_col": 19,
"start_line": 160
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.Decls.va_state -> h: Vale.PPC64LE.Decls.heaplet_id -> Vale.PPC64LE.Decls.vale_heap | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.heaplet_id",
"Vale.PPC64LE.Decls.va_get_mem_heaplet",
"Vale.PPC64LE.Decls.vale_heap"
] | [] | false | false | false | true | false | let va_eval_heaplet (s: va_state) (h: heaplet_id) : vale_heap =
| va_get_mem_heaplet h s | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_is_dst_reg_opr | val va_is_dst_reg_opr : r: Vale.PPC64LE.Decls.reg_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 72,
"end_line": 164,
"start_col": 19,
"start_line": 164
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: Vale.PPC64LE.Decls.reg_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.reg_opr",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_True",
"Prims.logical"
] | [] | false | false | false | true | true | let va_is_dst_reg_opr (r: reg_opr) (s: va_state) =
| True | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_is_src_heaplet | val va_is_src_heaplet : h: Vale.PPC64LE.Decls.heaplet_id -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 75,
"end_line": 169,
"start_col": 19,
"start_line": 169
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: Vale.PPC64LE.Decls.heaplet_id -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.heaplet_id",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_True",
"Prims.logical"
] | [] | false | false | false | true | true | let va_is_src_heaplet (h: heaplet_id) (s: va_state) =
| True | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_is_dst_Mem64 | val va_is_dst_Mem64 : m: Vale.PPC64LE.Machine_s.maddr -> s: Vale.PPC64LE.Decls.va_state -> Vale.Def.Prop_s.prop0 | let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 77,
"end_line": 166,
"start_col": 19,
"start_line": 166
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Vale.PPC64LE.Machine_s.maddr -> s: Vale.PPC64LE.Decls.va_state -> Vale.Def.Prop_s.prop0 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.maddr",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.State.valid_mem",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | true | false | let va_is_dst_Mem64 (m: maddr) (s: va_state) =
| valid_mem m s | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_eval_vec_opr | val va_eval_vec_opr (s: va_state) (v: vec_opr) : GTot quad32 | val va_eval_vec_opr (s: va_state) (v: vec_opr) : GTot quad32 | let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 102,
"end_line": 159,
"start_col": 19,
"start_line": 159
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.Decls.va_state -> v: Vale.PPC64LE.Decls.vec_opr
-> Prims.GTot Vale.PPC64LE.Machine_s.quad32 | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.vec_opr",
"Vale.PPC64LE.State.eval_vec",
"Vale.PPC64LE.Machine_s.quad32"
] | [] | false | false | false | false | false | let va_eval_vec_opr (s: va_state) (v: vec_opr) : GTot quad32 =
| eval_vec v s | false |
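The record above defines va_eval_vec_opr as a thin ghost wrapper around eval_vec. A minimal calling sketch, assuming the opens shown; Example_VecEval and example_vec_value are hypothetical names used only for illustration, not part of the library.

module Example_VecEval
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Decls

// Ghost read of vector register v in state s; by the definition above this equals eval_vec v s.
let example_vec_value (s:va_state) (v:vec_opr) : GTot quad32 =
  va_eval_vec_opr s v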
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_is_dst_heaplet | val va_is_dst_heaplet : h: Vale.PPC64LE.Decls.heaplet_id -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 75,
"end_line": 170,
"start_col": 19,
"start_line": 170
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: Vale.PPC64LE.Decls.heaplet_id -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.heaplet_id",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_True",
"Prims.logical"
] | [] | false | false | false | true | true | let va_is_dst_heaplet (h: heaplet_id) (s: va_state) =
| True | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_is_src_vec_opr | val va_is_src_vec_opr : v: Vale.PPC64LE.Decls.vec_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 72,
"end_line": 167,
"start_col": 19,
"start_line": 167
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | v: Vale.PPC64LE.Decls.vec_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.vec_opr",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_True",
"Prims.logical"
] | [] | false | false | false | true | true | let va_is_src_vec_opr (v: vec_opr) (s: va_state) =
| True | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_is_src_reg_opr | val va_is_src_reg_opr : r: Vale.PPC64LE.Decls.reg_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 72,
"end_line": 163,
"start_col": 19,
"start_line": 163
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: Vale.PPC64LE.Decls.reg_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.reg_opr",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_True",
"Prims.logical"
] | [] | false | false | false | true | true | let va_is_src_reg_opr (r: reg_opr) (s: va_state) =
| True | false |
|
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_eval_Mem64 | val va_eval_Mem64 (s: va_state) (m: maddr) : GTot nat64 | val va_eval_Mem64 (s: va_state) (m: maddr) : GTot nat64 | let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 107,
"end_line": 156,
"start_col": 19,
"start_line": 156
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.Decls.va_state -> m: Vale.PPC64LE.Machine_s.maddr
-> Prims.GTot Vale.PPC64LE.Machine_s.nat64 | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.maddr",
"Vale.PPC64LE.State.eval_mem",
"Vale.PPC64LE.State.eval_maddr",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | false | false | false | false | false | let va_eval_Mem64 (s: va_state) (m: maddr) : GTot nat64 =
| eval_mem (eval_maddr m s) s | false |
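va_eval_Mem64 first resolves the maddr (base register plus offset) with eval_maddr and then reads the 64-bit word at that address with eval_mem. A minimal sketch of using it, assuming the opens shown; Example_MemEval and example_read64 are hypothetical names, not taken from the library.

module Example_MemEval
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Decls

// Ghost read of the 64-bit word addressed by eval_maddr m s (base register + offset).
let example_read64 (s:va_state) (m:maddr) : GTot nat64 =
  va_eval_Mem64 s m // i.e. eval_mem (eval_maddr m s) s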
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_is_src_Mem64 | val va_is_src_Mem64 : m: Vale.PPC64LE.Machine_s.maddr -> s: Vale.PPC64LE.Decls.va_state -> Vale.Def.Prop_s.prop0 | let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 77,
"end_line": 165,
"start_col": 19,
"start_line": 165
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Vale.PPC64LE.Machine_s.maddr -> s: Vale.PPC64LE.Decls.va_state -> Vale.Def.Prop_s.prop0 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.maddr",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.State.valid_mem",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | true | false | let va_is_src_Mem64 (m: maddr) (s: va_state) =
| valid_mem m s | false |
|
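A minimal sketch for the va_is_src_Mem64 record above: the source-operand validity predicate is just an alias for valid_mem, so the equivalence below is immediate. The lemma name is hypothetical; the sketch assumes the module's opens shown in the record are in scope.

// Hypothetical sketch: va_is_src_Mem64 unfolds to valid_mem.
let va_is_src_Mem64_unfolds (m:maddr) (s:va_state)
  : Lemma (va_is_src_Mem64 m s <==> valid_mem m s)
  = ()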
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_upd_ok | val va_upd_ok (ok: bool) (s: state) : state | val va_upd_ok (ok: bool) (s: state) : state | let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok } | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 74,
"end_line": 172,
"start_col": 12,
"start_line": 172
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ok: Prims.bool -> s: Vale.PPC64LE.State.state -> Vale.PPC64LE.State.state | Prims.Tot | [
"total"
] | [] | [
"Prims.bool",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Machine_s.Mkstate",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__regs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__vecs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__cr0",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__xer",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stack",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint"
] | [] | false | false | false | true | false | let va_upd_ok (ok: bool) (s: state) : state =
| { s with ok = ok } | false |
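A minimal sketch for the va_upd_ok record above: the update only overwrites the ok flag and leaves the rest of the machine state (for example the register file) unchanged, which follows directly from the record-update body { s with ok = ok }. The lemma name is hypothetical; the sketch assumes the module's opens shown in the record are in scope.

// Hypothetical sketch: va_upd_ok touches only the ok field of the state.
let va_upd_ok_only_touches_ok (b:bool) (s:state)
  : Lemma ((va_upd_ok b s).ok == b /\ (va_upd_ok b s).regs == s.regs)
  = ()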
Vale.AES.X64.GCMencryptOpt.fsti | Vale.AES.X64.GCMencryptOpt.va_quick_Gcm_blocks_auth | val va_quick_Gcm_blocks_auth (auth_b abytes_b hkeys_b: buffer128) (h_LE: quad32)
: (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) | val va_quick_Gcm_blocks_auth (auth_b abytes_b hkeys_b: buffer128) (h_LE: quad32)
: (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) | let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)) | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 62,
"end_line": 407,
"start_col": 0,
"start_line": 401
} | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g)))) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
auth_b: Vale.X64.Memory.buffer128 ->
abytes_b: Vale.X64.Memory.buffer128 ->
hkeys_b: Vale.X64.Memory.buffer128 ->
h_LE: Vale.X64.Decls.quad32
-> Vale.X64.QuickCode.va_quickCode (FStar.Seq.Base.seq Vale.X64.Decls.quad32)
(Vale.AES.X64.GCMencryptOpt.va_code_Gcm_blocks_auth ()) | Prims.Tot | [
"total"
] | [] | [
"Vale.X64.Memory.buffer128",
"Vale.X64.Decls.quad32",
"Vale.X64.QuickCode.va_QProc",
"FStar.Seq.Base.seq",
"Vale.AES.X64.GCMencryptOpt.va_code_Gcm_blocks_auth",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_xmm",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rRdx",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Gcm_blocks_auth",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Gcm_blocks_auth",
"Vale.X64.QuickCode.va_quickCode"
] | [] | false | false | false | false | false | let va_quick_Gcm_blocks_auth (auth_b abytes_b hkeys_b: buffer128) (h_LE: quad32)
: (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
| (va_QProc (va_code_Gcm_blocks_auth ())
([
va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15;
va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx
])
(va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)) | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_eval_cmp_opr | val va_eval_cmp_opr (s: va_state) (o: cmp_opr) : GTot nat64 | val va_eval_cmp_opr (s: va_state) (o: cmp_opr) : GTot nat64 | let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 105,
"end_line": 158,
"start_col": 19,
"start_line": 158
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.Decls.va_state -> o: Vale.PPC64LE.Machine_s.cmp_opr
-> Prims.GTot Vale.PPC64LE.Machine_s.nat64 | Prims.GTot | [
"sometrivial"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.cmp_opr",
"Vale.PPC64LE.State.eval_cmp_opr",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | false | false | false | false | false | let va_eval_cmp_opr (s: va_state) (o: cmp_opr) : GTot nat64 =
| eval_cmp_opr o s | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_upd_stack | val va_upd_stack (stack: SI.vale_stack) (s: state) : state | val va_upd_stack (stack: SI.vale_stack) (s: state) : state | let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) } | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 115,
"end_line": 181,
"start_col": 12,
"start_line": 181
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state = | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | stack: Vale.PPC64LE.Stack_i.vale_stack -> s: Vale.PPC64LE.State.state -> Vale.PPC64LE.State.state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Stack_i.vale_stack",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Machine_s.Mkstate",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ok",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__regs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__vecs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__cr0",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__xer",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.PPC64LE.Stack_Sems.stack_to_s",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint"
] | [] | false | false | false | true | false | let va_upd_stack (stack: SI.vale_stack) (s: state) : state =
| { s with ms_stack = (VSS.stack_to_s stack) } | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_upd_reg | val va_upd_reg (r: reg) (v: nat64) (s: state) : state | val va_upd_reg (r: reg) (v: nat64) (s: state) : state | let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 81,
"end_line": 175,
"start_col": 12,
"start_line": 175
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 } | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: Vale.PPC64LE.Machine_s.reg -> v: Vale.PPC64LE.Machine_s.nat64 -> s: Vale.PPC64LE.State.state
-> Vale.PPC64LE.State.state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.reg",
"Vale.PPC64LE.Machine_s.nat64",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.State.update_reg"
] | [] | false | false | false | true | false | let va_upd_reg (r: reg) (v: nat64) (s: state) : state =
| update_reg r v s | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_is_dst_vec_opr | val va_is_dst_vec_opr : v: Vale.PPC64LE.Decls.vec_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 72,
"end_line": 168,
"start_col": 19,
"start_line": 168
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | v: Vale.PPC64LE.Decls.vec_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.vec_opr",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_True",
"Prims.logical"
] | [] | false | false | false | true | true | let va_is_dst_vec_opr (v: vec_opr) (s: va_state) =
| True | false |
|
Vale.AES.X64.GCMencryptOpt.fsti | Vale.AES.X64.GCMencryptOpt.va_quick_Save_registers | val va_quick_Save_registers (win: bool) : (va_quickCode unit (va_code_Save_registers win)) | val va_quick_Save_registers (win: bool) : (va_quickCode unit (va_code_Save_registers win)) | let va_quick_Save_registers (win:bool) : (va_quickCode unit (va_code_Save_registers win)) =
(va_QProc (va_code_Save_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack;
va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) (va_wp_Save_registers win) (va_wpProof_Save_registers
win)) | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 9,
"end_line": 542,
"start_col": 0,
"start_line": 539
} | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
//-- Save_registers
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(())))
val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) va_s0
va_k ((va_sM, va_f0, va_g)))) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | win: Prims.bool
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCMencryptOpt.va_code_Save_registers win) | Prims.Tot | [
"total"
] | [] | [
"Prims.bool",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCMencryptOpt.va_code_Save_registers",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_stackTaint",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_stack",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Machine_s.rRax",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Save_registers",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Save_registers",
"Vale.X64.QuickCode.va_quickCode"
] | [] | false | false | false | false | false | let va_quick_Save_registers (win: bool) : (va_quickCode unit (va_code_Save_registers win)) =
| (va_QProc (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax])
(va_wp_Save_registers win)
(va_wpProof_Save_registers win)) | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_upd_vec | val va_upd_vec (x: vec) (v: quad32) (s: state) : state | val va_upd_vec (x: vec) (v: quad32) (s: state) : state | let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 82,
"end_line": 176,
"start_col": 12,
"start_line": 176
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer } | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Vale.PPC64LE.Machine_s.vec -> v: Vale.PPC64LE.Machine_s.quad32 -> s: Vale.PPC64LE.State.state
-> Vale.PPC64LE.State.state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.vec",
"Vale.PPC64LE.Machine_s.quad32",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.State.update_vec"
] | [] | false | false | false | true | false | let va_upd_vec (x: vec) (v: quad32) (s: state) : state =
| update_vec x v s | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_upd_xer | val va_upd_xer (xer: xer_t) (s: state) : state | val va_upd_xer (xer: xer_t) (s: state) : state | let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer } | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 79,
"end_line": 174,
"start_col": 12,
"start_line": 174
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok } | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | xer: Vale.PPC64LE.Machine_s.xer_t -> s: Vale.PPC64LE.State.state -> Vale.PPC64LE.State.state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.xer_t",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Machine_s.Mkstate",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ok",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__regs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__vecs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__cr0",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stack",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint"
] | [] | false | false | false | true | false | let va_upd_xer (xer: xer_t) (s: state) : state =
| { s with xer = xer } | false |
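Note on the record above: its completed definition, va_upd_xer, is a total helper that rebuilds the PPC64LE machine state with only the xer field replaced, leaving registers, vectors, heap and stack fields untouched. A minimal usage sketch in F*, relying only on the declarations visible in the file_context above (flip_xer_ca and s0 are illustrative names, not part of the dataset):

(* Sketch only: assumes update_xer_ca and va_upd_xer as declared in
   Vale.PPC64LE.Decls.fsti above; flip_xer_ca is a hypothetical helper. *)
let flip_xer_ca (s0:state) : state =
  (* set the XER carry bit, then write the updated xer back into the state *)
  va_upd_xer (update_xer_ca s0.xer true) s0

Because va_upd_xer is a plain record update, every field of the result other than xer is definitionally equal to the corresponding field of s0.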
Vale.AES.X64.GCMencryptOpt.fsti | Vale.AES.X64.GCMencryptOpt.va_wp_Save_registers | val va_wp_Save_registers (win: bool) (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 | val va_wp_Save_registers (win: bool) (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 | let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(()))) | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 10,
"end_line": 530,
"start_col": 0,
"start_line": 473
} | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
//-- Save_registers
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0)))))))) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
win: Prims.bool ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | Prims.Tot | [
"total"
] | [] | [
"Prims.bool",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Prims.l_Forall",
"Vale.X64.Memory.nat64",
"Vale.X64.InsBasic.vale_stack",
"Vale.X64.Flags.t",
"Vale.X64.Memory.memtaint",
"Prims.l_imp",
"Prims.int",
"Prims.op_Subtraction",
"Prims.op_Multiply",
"Prims.op_Addition",
"Vale.X64.Decls.va_if",
"Prims.l_not",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"Vale.X64.Stack_i.valid_stack_slot64s",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.va_get_stackTaint",
"Vale.X64.Stack_i.modifies_stack",
"Vale.X64.Stack_i.modifies_stacktaint",
"Vale.X64.Stack_i.load_stack64",
"Vale.Arch.Types.hi64",
"Vale.X64.Decls.va_get_xmm",
"Vale.Arch.Types.lo64",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR15",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_stackTaint",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_stack",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rRax"
] | [] | false | false | false | true | true | let va_wp_Save_registers (win: bool) (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 =
| (va_get_ok va_s0 /\ sse_enabled /\
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
(forall (va_x_rax: nat64)
(va_x_rsp: nat64)
(va_x_stack: vale_stack)
(va_x_efl: Vale.X64.Flags.t)
(va_x_stackTaint: memtaint).
let va_sM =
va_upd_stackTaint va_x_stackTaint
(va_upd_flags va_x_efl
(va_upd_stack va_x_stack
(va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRax va_x_rax va_s0))))
in
va_get_ok va_sM /\
va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 -
8
`op_Multiply`
(8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) /\
Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM)
(8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0))
(va_get_stack va_sM)
Secret
(va_get_stackTaint va_sM) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0)
(va_get_stack va_s0)
(va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0)
(va_get_stackTaint va_sM) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 6 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 7 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 8 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 9 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 10 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 11 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 12 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 13 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 14 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 15 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 0 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rRdi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 48 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rR15 va_sM ==>
va_k va_sM (()))) | false |
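
A minimal usage sketch (not part of the dataset record above; the name example_save_registers_post is invented here): a weakest-precondition combinator of this shape is used by applying it to a concrete continuation va_k, which plays the role of the caller's postcondition. The F* lines below assume va_wp_Save_registers and the Vale.X64 declarations it mentions are in scope.

// Hypothetical usage sketch: discharge the wp against a trivial postcondition.
let example_save_registers_post (win:bool) (va_s0:va_state) : Type0 =
  va_wp_Save_registers win va_s0 (fun _ _ -> True)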
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_upd_stackTaint | val va_upd_stackTaint (stackTaint: M.memtaint) (s: state) : state | val va_upd_stackTaint (stackTaint: M.memtaint) (s: state) : state | let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint } | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 115,
"end_line": 182,
"start_col": 12,
"start_line": 182
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) } | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | stackTaint: Vale.PPC64LE.Memory.memtaint -> s: Vale.PPC64LE.State.state -> Vale.PPC64LE.State.state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Memory.memtaint",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Machine_s.Mkstate",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ok",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__regs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__vecs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__cr0",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__xer",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stack"
] | [] | false | false | false | true | false | let va_upd_stackTaint (stackTaint: M.memtaint) (s: state) : state =
| { s with ms_stackTaint = stackTaint } | false |
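
A minimal usage sketch (not part of the dataset record above; example_upd_taint_twice is an invented name): va_upd_stackTaint replaces only the ms_stackTaint field of the state, so chaining two updates keeps the taint map written last. The F* lines below assume the opens and abbreviations of Vale.PPC64LE.Decls shown above (in particular M for Vale.PPC64LE.Memory).

// Hypothetical usage sketch: only ms_stackTaint changes, and t2 overwrites t1.
let example_upd_taint_twice (t1 t2:M.memtaint) (s:state) : state =
  va_upd_stackTaint t2 (va_upd_stackTaint t1 s)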
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_update_ok | val va_update_ok (sM sK: va_state) : va_state | val va_update_ok (sM sK: va_state) : va_state | let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 95,
"end_line": 185,
"start_col": 19,
"start_line": 185
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint } | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Decls.va_state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_ok",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ok"
] | [] | false | false | false | true | false | let va_update_ok (sM sK: va_state) : va_state =
| va_upd_ok sM.ok sK | false |
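
A minimal usage sketch (not part of the dataset record above; example_update_ok is an invented name): va_update_ok sM sK frames sK by copying only the ok flag from sM, which amounts to setting that flag explicitly with va_upd_ok. The F* lines below assume the Vale.PPC64LE.Decls interface above is in scope.

// Hypothetical usage sketch: copying ok from sM is setting sK's ok to va_get_ok sM.
let example_update_ok (sM sK:va_state) : va_state =
  va_upd_ok (va_get_ok sM) sK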
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.valid_mem_addr | val valid_mem_addr (tm: tmaddr) (s: state) : prop0 | val valid_mem_addr (tm: tmaddr) (s: state) : prop0 | let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 106,
"end_line": 120,
"start_col": 0,
"start_line": 117
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | tm: Vale.PPC64LE.Machine_s.tmaddr -> s: Vale.PPC64LE.State.state -> Vale.Def.Prop_s.prop0 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.tmaddr",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Machine_s.maddr",
"Vale.Arch.HeapTypes_s.taint",
"Prims.l_and",
"Vale.PPC64LE.State.valid_maddr",
"Vale.PPC64LE.Decls.valid_mem_operand64",
"Vale.PPC64LE.State.eval_maddr",
"Vale.PPC64LE.Memory.get_vale_heap",
"Vale.PPC64LE.Decls.coerce",
"Vale.PPC64LE.Memory.vale_full_heap",
"Vale.Arch.Heap.heap_impl",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.Arch.HeapImpl.__proj__Mkvale_full_heap__item__vf_layout",
"Vale.Arch.HeapImpl.vale_full_heap",
"Vale.Def.Prop_s.prop0"
] | [] | false | false | false | true | false | let valid_mem_addr (tm: tmaddr) (s: state) : prop0 =
| let m, t = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s)
t
(M.get_vale_heap (coerce s.ms_heap))
(coerce s.ms_heap).vf_layout | false |
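
A minimal usage sketch (not part of the dataset record above; example_addr_ok is an invented name): valid_mem_addr pairs an address expression with a taint and checks it against the current heap and layout, in the same style as the valid_stack predicate from the file context. The F* lines below assume the Vale.PPC64LE.Decls interface above is in scope.

// Hypothetical usage sketch: a spec-level predicate requiring the same maddr and taint
// to be valid both as a heap operand and as a stack slot.
let example_addr_ok (m:maddr) (t:taint) (s:state) : prop0 =
  valid_mem_addr (m, t) s /\ valid_stack m t s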
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_update_xer | val va_update_xer (sM sK: va_state) : va_state | val va_update_xer (sM sK: va_state) : va_state | let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 98,
"end_line": 187,
"start_col": 19,
"start_line": 187
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Decls.va_state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_xer",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__xer"
] | [] | false | false | false | true | false | let va_update_xer (sM sK: va_state) : va_state =
| va_upd_xer sM.xer sK | false |
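
A minimal usage sketch (not part of the dataset record above; example_frame_ok_xer is an invented name): the va_update_* functions are designed to be nested, building a framed state that copies a chosen set of fields from sM while leaving the rest of sK unchanged. The F* lines below assume the Vale.PPC64LE.Decls interface above is in scope.

// Hypothetical usage sketch: copy ok and xer from sM into sK, keep everything else from sK.
let example_frame_ok_xer (sM sK:va_state) : va_state =
  va_update_ok sM (va_update_xer sM sK)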
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_upd_cr0 | val va_upd_cr0 (cr0: cr0_t) (s: state) : state | val va_upd_cr0 (cr0: cr0_t) (s: state) : state | let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 } | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 79,
"end_line": 173,
"start_col": 12,
"start_line": 173
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | cr0: Vale.PPC64LE.Machine_s.cr0_t -> s: Vale.PPC64LE.State.state -> Vale.PPC64LE.State.state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.cr0_t",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Machine_s.Mkstate",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ok",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__regs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__vecs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__xer",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stack",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint"
] | [] | false | false | false | true | false | let va_upd_cr0 (cr0: cr0_t) (s: state) : state =
| { s with cr0 = cr0 } | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_update_reg | val va_update_reg (r: reg) (sM sK: va_state) : va_state | val va_update_reg (r: reg) (sM sK: va_state) : va_state | let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 33,
"end_line": 189,
"start_col": 19,
"start_line": 188
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: Vale.PPC64LE.Machine_s.reg -> sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state
-> Vale.PPC64LE.Decls.va_state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.reg",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_reg",
"Vale.PPC64LE.State.eval_reg"
] | [] | false | false | false | true | false | let va_update_reg (r: reg) (sM sK: va_state) : va_state =
| va_upd_reg r (eval_reg r sM) sK | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_update_cr0 | val va_update_cr0 (sM sK: va_state) : va_state | val va_update_cr0 (sM sK: va_state) : va_state | let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 98,
"end_line": 186,
"start_col": 19,
"start_line": 186
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Decls.va_state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_cr0",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__cr0"
] | [] | false | false | false | true | false | let va_update_cr0 (sM sK: va_state) : va_state =
| va_upd_cr0 sM.cr0 sK | false |
Vale.AES.X64.GCMencryptOpt.fsti | Vale.AES.X64.GCMencryptOpt.va_quick_Restore_registers | val va_quick_Restore_registers
(win: bool)
(old_rsp: nat)
(old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15:
quad32)
: (va_quickCode unit (va_code_Restore_registers win)) | val va_quick_Restore_registers
(win: bool)
(old_rsp: nat)
(old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15:
quad32)
: (va_quickCode unit (va_code_Restore_registers win)) | let va_quick_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) : (va_quickCode unit
(va_code_Restore_registers win)) =
(va_QProc (va_code_Restore_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp;
va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15;
va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;
va_Mod_reg64 rRbp; va_Mod_reg64 rRbx; va_Mod_reg64 rRax]) (va_wp_Restore_registers win old_rsp
old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14
old_xmm15) (va_wpProof_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9
old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15)) | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 65,
"end_line": 718,
"start_col": 0,
"start_line": 707
} | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
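// Note (editorial comment, not part of the Vale-generated interface): in the Ghash_extra_bytes
// specification above, xmm9 must hold the byte-reversal shuffle constant
// (Mkfour 202182159 134810123 67438087 66051) and xmm8 the byte-reversed incremental GHash of
// completed_quads. The procedure takes the trailing partial block from xmm0, pads it to 128 bits
// (pad_to_128_bits), and the ensures clause states that xmm8 then holds the byte-reversed
// ghash_incremental of old_hash over the padded input sequence.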
//-- Gcm_blocks_auth
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
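// Note (editorial comment, not part of the Vale-generated interface): Gcm_blocks_auth hashes the
// additional authenticated data: the quads of auth_b, extended with the abytes_b block exactly
// when the byte count in rRsi exceeds 16 * rRdx (i.e. there are trailing bytes beyond the full
// blocks), then truncated to rRsi bytes and padded to 128 bits. The resulting auth_quad_seq is
// returned as the ghost result, and xmm8 ends up holding the byte-reversed ghash_incremental0 of
// that sequence starting from the zero quad. The lemma's ensures clause writes the block
// selection with an ordinary F* 'if', while va_wp_Gcm_blocks_auth uses va_if; the two agree by
// the definition of va_if, which merely hides 'if' so that both branches normalize during
// weakest-precondition computation.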
//-- Save_registers
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(())))
val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Save_registers (win:bool) : (va_quickCode unit (va_code_Save_registers win)) =
(va_QProc (va_code_Save_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack;
va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) (va_wp_Save_registers win) (va_wpProof_Save_registers
win))
//--
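// Note (editorial comment, not part of the Vale-generated interface): Save_registers pushes a
// callee-save frame of 8 * (8 + (if win then 10*2 else 0)) bytes. On Windows the first 160 bytes
// hold the hi64/lo64 halves of xmm6-xmm15 (10 registers, 16 bytes each, at offsets 0..152); the
// eight 8-byte slots for rbx, rbp, rdi, rsi and r12-r15 follow, which is why each GPR offset in
// the postcondition adds (if win then 160 else 0). Restore_registers below assumes exactly this
// layout when it reloads the saved values.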
//-- Restore_registers
val va_code_Restore_registers : win:bool -> Tot va_code
val va_codegen_success_Restore_registers : win:bool -> Tot va_pbool
val va_lemma_Restore_registers : va_b0:va_code -> va_s0:va_state -> win:bool -> old_rsp:nat ->
old_xmm6:quad32 -> old_xmm7:quad32 -> old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 ->
old_xmm11:quad32 -> old_xmm12:quad32 -> old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Restore_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + (if win then (10
`op_Multiply` 2) else 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\ va_get_reg64
rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + (if win then (10 `op_Multiply` 2) else 0)) /\ (win
==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 8) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 24) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 56) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 64) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 72) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 80) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 88) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 96) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 104) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 112) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 120) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 128) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 136) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 144) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm15) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 152) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm15)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64
rRsp va_s0) (va_get_reg64 rRsp va_sM) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 24 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 40 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 56 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\
(win ==> va_get_xmm 6 va_sM == old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win
==> va_get_xmm 8 va_sM == old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==>
va_get_xmm 10 va_sM == old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==>
va_get_xmm 12 va_sM == old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==>
va_get_xmm 14 va_sM == old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) /\ va_state_eq
va_sM (va_update_stackTaint va_sM (va_update_flags va_sM (va_update_reg64 rRsp va_sM
(va_update_stack va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM
(va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM
(va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM
(va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM
va_s0))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + va_if win (fun _ -> 10
`op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\
va_get_reg64 rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply`
2) (fun _ -> 0)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0)
(va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 16) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 48) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 64) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 72) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 80) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 88) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 96) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 104) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 112) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 120) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 128) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 136) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 144) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm15) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 152) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm15) /\ (forall (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rbp:nat64)
(va_x_rdi:nat64) (va_x_rsi:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64)
(va_x_r15:nat64) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_stack:vale_stack) (va_x_rsp:nat64)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_reg64 rRsp va_x_rsp (va_upd_stack va_x_stack
(va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12
va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9
(va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_reg64 rR15
va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbp va_x_rbp
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax va_s0)))))))))))))))))))))) in
va_get_ok va_sM /\ va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp
(va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_s0) (va_get_reg64 rRsp va_sM)
(va_get_stack va_s0) (va_get_stack va_sM) /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbx
va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + va_if win (fun _ -> 160)
(fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 16 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\ (win ==> va_get_xmm 6 va_sM ==
old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win ==> va_get_xmm 8 va_sM ==
old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==> va_get_xmm 10 va_sM ==
old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==> va_get_xmm 12 va_sM ==
old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==> va_get_xmm 14 va_sM ==
old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) ==> va_k va_sM (())))
val va_wpProof_Restore_registers : win:bool -> old_rsp:nat -> old_xmm6:quad32 -> old_xmm7:quad32 ->
old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 -> old_xmm11:quad32 -> old_xmm12:quad32 ->
old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32 -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8
old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15 va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Restore_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp; va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm
14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRbp; va_Mod_reg64 rRbx;
va_Mod_reg64 rRax]) va_s0 va_k ((va_sM, va_f0, va_g)))) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
win: Prims.bool ->
old_rsp: Prims.nat ->
old_xmm6: Vale.X64.Decls.quad32 ->
old_xmm7: Vale.X64.Decls.quad32 ->
old_xmm8: Vale.X64.Decls.quad32 ->
old_xmm9: Vale.X64.Decls.quad32 ->
old_xmm10: Vale.X64.Decls.quad32 ->
old_xmm11: Vale.X64.Decls.quad32 ->
old_xmm12: Vale.X64.Decls.quad32 ->
old_xmm13: Vale.X64.Decls.quad32 ->
old_xmm14: Vale.X64.Decls.quad32 ->
old_xmm15: Vale.X64.Decls.quad32
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCMencryptOpt.va_code_Restore_registers win) | Prims.Tot | [
"total"
] | [] | [
"Prims.bool",
"Prims.nat",
"Vale.X64.Decls.quad32",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCMencryptOpt.va_code_Restore_registers",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_stackTaint",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.QuickCode.va_Mod_stack",
"Vale.X64.QuickCode.va_Mod_xmm",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRax",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Restore_registers",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Restore_registers",
"Vale.X64.QuickCode.va_quickCode"
] | [] | false | false | false | false | false | let va_quick_Restore_registers
(win: bool)
(old_rsp: nat)
(old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15:
quad32)
: (va_quickCode unit (va_code_Restore_registers win)) =
| (va_QProc (va_code_Restore_registers win)
([
va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp; va_Mod_stack; va_Mod_xmm 15;
va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15; va_Mod_reg64 rR14;
va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;
va_Mod_reg64 rRbp; va_Mod_reg64 rRbx; va_Mod_reg64 rRax
])
(va_wp_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11
old_xmm12 old_xmm13 old_xmm14 old_xmm15)
(va_wpProof_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10
old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15)) | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_update_operand_reg_opr | val va_update_operand_reg_opr (r: reg) (sM sK: va_state) : va_state | val va_update_operand_reg_opr (r: reg) (sM sK: va_state) : va_state | let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 23,
"end_line": 201,
"start_col": 0,
"start_line": 200
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
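// Illustrative example (editorial comment, not part of the original source): since va_if merely
// hides 'if' behind thunked branches, applications of it normalize away, e.g.
//   va_if true  (fun _ -> 1) (fun _ -> 0) reduces to 1
//   va_if false (fun _ -> 1) (fun _ -> 0) reduces to 0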
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer (#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
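// Note (editorial comment, not part of the original source): valid_mem_operand64/128 assert that
// the given address falls on a readable slot of some buffer in the heap with the expected taint;
// the witness is the buffer/index pair constrained by valid_buf_maddr64/128 above.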
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
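// Note (editorial comment, not part of the original source): va_update_reg r sM sK copies
// register r from sM into sK (and likewise for the other va_update_* functions), so a
// postcondition of the form va_state_eq sM (va_update_reg r sM s0) says that sM agrees with s0
// everywhere except possibly register r; this is the framing idiom used in the ensures clauses
// of Vale-generated lemmas.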
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: Vale.PPC64LE.Machine_s.reg -> sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state
-> Vale.PPC64LE.Decls.va_state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.reg",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_update_reg"
] | [] | false | false | false | true | false | let va_update_operand_reg_opr (r: reg) (sM sK: va_state) : va_state =
| va_update_reg r sM sK | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_update_stackTaint | val va_update_stackTaint (sM sK: va_state) : va_state | val va_update_stackTaint (sM sK: va_state) : va_state | let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 122,
"end_line": 197,
"start_col": 19,
"start_line": 197
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Decls.va_state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_stackTaint",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint"
] | [] | false | false | false | true | false | let va_update_stackTaint (sM sK: va_state) : va_state =
| va_upd_stackTaint sM.ms_stackTaint sK | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_update_vec | val va_update_vec (x: vec) (sM sK: va_state) : va_state | val va_update_vec (x: vec) (sM sK: va_state) : va_state | let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 33,
"end_line": 195,
"start_col": 19,
"start_line": 194
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state = | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Vale.PPC64LE.Machine_s.vec -> sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state
-> Vale.PPC64LE.Decls.va_state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.vec",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_vec",
"Vale.PPC64LE.State.eval_vec"
] | [] | false | false | false | true | false | let va_update_vec (x: vec) (sM sK: va_state) : va_state =
| va_upd_vec x (eval_vec x sM) sK | false |
Vale.AES.X64.GCMencryptOpt.fsti | Vale.AES.X64.GCMencryptOpt.va_wp_Restore_registers | val va_wp_Restore_registers
(win: bool)
(old_rsp: nat)
(old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15:
quad32)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | val va_wp_Restore_registers
(win: bool)
(old_rsp: nat)
(old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15:
quad32)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | let va_wp_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + va_if win (fun _ -> 10
`op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\
va_get_reg64 rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply`
2) (fun _ -> 0)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0)
(va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 16) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 48) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 64) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 72) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 80) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 88) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 96) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 104) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 112) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 120) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 128) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 136) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 144) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm15) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 152) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm15) /\ (forall (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rbp:nat64)
(va_x_rdi:nat64) (va_x_rsi:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64)
(va_x_r15:nat64) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_stack:vale_stack) (va_x_rsp:nat64)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_reg64 rRsp va_x_rsp (va_upd_stack va_x_stack
(va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12
va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9
(va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_reg64 rR15
va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbp va_x_rbp
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax va_s0)))))))))))))))))))))) in
va_get_ok va_sM /\ va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp
(va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_s0) (va_get_reg64 rRsp va_sM)
(va_get_stack va_s0) (va_get_stack va_sM) /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbx
va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + va_if win (fun _ -> 160)
(fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 16 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\ (win ==> va_get_xmm 6 va_sM ==
old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win ==> va_get_xmm 8 va_sM ==
old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==> va_get_xmm 10 va_sM ==
old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==> va_get_xmm 12 va_sM ==
old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==> va_get_xmm 14 va_sM ==
old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) ==> va_k va_sM (()))) | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 82,
"end_line": 691,
"start_col": 0,
"start_line": 619
} | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
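// Gcm_blocks_auth: folds the additional authenticated data into the GHASH state.
// Per the spec below, the AAD is the first rRsi bytes of auth_b (extended with abytes_b
// when rRsi exceeds 16 * rRdx, i.e. when a partial final block is present), padded to a
// 128-bit boundary. On exit, xmm8 == reverse_bytes_quad32 (ghash_incremental0 h_LE
// (Mkfour 0 0 0 0) auth_quad_seq), the padded AAD quads are returned as auth_quad_seq,
// and r15 is set to rsi.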
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
//-- Save_registers
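// Save_registers: per the spec below, lowers rsp by 8 * (8 + (if win then 20 else 0))
// and spills, at offsets from the new rsp:
//   win:  0..152   hi64/lo64 pairs of xmm6..xmm15 (hi64 at the lower offset of each pair)
//         160..216 rbx, rbp, rdi, rsi, r12, r13, r14, r15
//   !win: 0..56    rbx, rbp, rdi, rsi, r12, r13, r14, r15
// rax and the flags are scratch (see the modifies list in va_wpProof_Save_registers).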
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(())))
val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Save_registers (win:bool) : (va_quickCode unit (va_code_Save_registers win)) =
(va_QProc (va_code_Save_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack;
va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) (va_wp_Save_registers win) (va_wpProof_Save_registers
win))
//--
//-- Restore_registers
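// Restore_registers: inverse of Save_registers. Per the spec below, it expects
// rsp == old_rsp - 8 * (8 + (if win then 20 else 0)) with the frame laid out as in
// Save_registers; it reloads xmm6..xmm15 (Windows only) and rbx, rbp, rdi, rsi,
// r12..r15, and restores rsp to old_rsp.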
val va_code_Restore_registers : win:bool -> Tot va_code
val va_codegen_success_Restore_registers : win:bool -> Tot va_pbool
val va_lemma_Restore_registers : va_b0:va_code -> va_s0:va_state -> win:bool -> old_rsp:nat ->
old_xmm6:quad32 -> old_xmm7:quad32 -> old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 ->
old_xmm11:quad32 -> old_xmm12:quad32 -> old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Restore_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + (if win then (10
`op_Multiply` 2) else 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\ va_get_reg64
rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + (if win then (10 `op_Multiply` 2) else 0)) /\ (win
==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 8) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 24) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 56) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 64) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 72) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 80) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 88) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 96) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 104) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 112) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 120) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 128) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 136) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 144) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm15) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 152) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm15)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64
rRsp va_s0) (va_get_reg64 rRsp va_sM) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 24 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 40 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 56 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\
(win ==> va_get_xmm 6 va_sM == old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win
==> va_get_xmm 8 va_sM == old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==>
va_get_xmm 10 va_sM == old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==>
va_get_xmm 12 va_sM == old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==>
va_get_xmm 14 va_sM == old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) /\ va_state_eq
va_sM (va_update_stackTaint va_sM (va_update_flags va_sM (va_update_reg64 rRsp va_sM
(va_update_stack va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM
(va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM
(va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM
(va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM
va_s0)))))))))))))))))))))))))) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
win: Prims.bool ->
old_rsp: Prims.nat ->
old_xmm6: Vale.X64.Decls.quad32 ->
old_xmm7: Vale.X64.Decls.quad32 ->
old_xmm8: Vale.X64.Decls.quad32 ->
old_xmm9: Vale.X64.Decls.quad32 ->
old_xmm10: Vale.X64.Decls.quad32 ->
old_xmm11: Vale.X64.Decls.quad32 ->
old_xmm12: Vale.X64.Decls.quad32 ->
old_xmm13: Vale.X64.Decls.quad32 ->
old_xmm14: Vale.X64.Decls.quad32 ->
old_xmm15: Vale.X64.Decls.quad32 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | Prims.Tot | [
"total"
] | [] | [
"Prims.bool",
"Prims.nat",
"Vale.X64.Decls.quad32",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.eq2",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Stack_i.valid_stack_slot64s",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Prims.op_Addition",
"Vale.X64.Decls.va_if",
"Prims.int",
"Prims.op_Multiply",
"Prims.l_not",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.va_get_stackTaint",
"Prims.op_Subtraction",
"Prims.l_imp",
"Vale.Def.Words_s.nat64",
"Vale.X64.Stack_i.load_stack64",
"Vale.Arch.Types.hi64",
"Vale.Arch.Types.lo64",
"Prims.l_Forall",
"Vale.X64.Memory.nat64",
"Vale.X64.InsBasic.vale_stack",
"Vale.X64.Flags.t",
"Vale.X64.Memory.memtaint",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"Vale.X64.Stack_i.modifies_stack",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Decls.va_get_xmm",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_stackTaint",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Decls.va_upd_stack",
"Vale.X64.Decls.va_upd_xmm",
"Vale.X64.Machine_s.rRax"
] | [] | false | false | false | true | true | let va_wp_Restore_registers
(win: bool)
(old_rsp: nat)
(old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15:
quad32)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 =
| (va_get_ok va_s0 /\ sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0)
(8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0))
(va_get_stack va_s0)
Secret
(va_get_stackTaint va_s0) /\
va_get_reg64 rRsp va_s0 ==
old_rsp - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm6) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm6) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm7) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm7) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm8) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm8) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm9) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm9) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 64) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm10) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 72) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm10) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 80) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm11) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 88) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm11) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 96) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm12) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 104) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm12) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 112) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm13) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 120) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm13) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 128) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm14) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 136) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm14) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 144) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm15) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 152) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm15) /\
(forall (va_x_rax: nat64) (va_x_rbx: nat64) (va_x_rbp: nat64) (va_x_rdi: nat64) (va_x_rsi: nat64)
(va_x_r12: nat64) (va_x_r13: nat64) (va_x_r14: nat64) (va_x_r15: nat64) (va_x_xmm6: quad32)
(va_x_xmm7: quad32) (va_x_xmm8: quad32) (va_x_xmm9: quad32) (va_x_xmm10: quad32)
(va_x_xmm11: quad32) (va_x_xmm12: quad32) (va_x_xmm13: quad32) (va_x_xmm14: quad32)
(va_x_xmm15: quad32) (va_x_stack: vale_stack) (va_x_rsp: nat64) (va_x_efl: Vale.X64.Flags.t)
(va_x_stackTaint: memtaint).
let va_sM =
va_upd_stackTaint va_x_stackTaint
(va_upd_flags va_x_efl
(va_upd_reg64 rRsp
va_x_rsp
(va_upd_stack va_x_stack
(va_upd_xmm 15
va_x_xmm15
(va_upd_xmm 14
va_x_xmm14
(va_upd_xmm 13
va_x_xmm13
(va_upd_xmm 12
va_x_xmm12
(va_upd_xmm 11
va_x_xmm11
(va_upd_xmm 10
va_x_xmm10
(va_upd_xmm 9
va_x_xmm9
(va_upd_xmm 8
va_x_xmm8
(va_upd_xmm 7
va_x_xmm7
(va_upd_xmm 6
va_x_xmm6
(va_upd_reg64 rR15
va_x_r15
(va_upd_reg64 rR14
va_x_r14
(va_upd_reg64 rR13
va_x_r13
(va_upd_reg64 rR12
va_x_r12
(va_upd_reg64 rRsi
va_x_rsi
(va_upd_reg64 rRdi
va_x_rdi
(va_upd_reg64 rRbp
va_x_rbp
(va_upd_reg64
rRbx
va_x_rbx
(va_upd_reg64
rRax
va_x_rax
va_s0
))))
))))))))))))))))))
in
va_get_ok va_sM /\ va_get_reg64 rRsp va_sM == old_rsp /\
Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_s0)
(va_get_reg64 rRsp va_sM)
(va_get_stack va_s0)
(va_get_stack va_sM) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rRbx va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rRdi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rR15 va_sM /\ (win ==> va_get_xmm 6 va_sM == old_xmm6) /\
(win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win ==> va_get_xmm 8 va_sM == old_xmm8) /\
(win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==> va_get_xmm 10 va_sM == old_xmm10) /\
(win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==> va_get_xmm 12 va_sM == old_xmm12) /\
(win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==> va_get_xmm 14 va_sM == old_xmm14) /\
(win ==> va_get_xmm 15 va_sM == old_xmm15) ==>
va_k va_sM (()))) | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_update_stack | val va_update_stack (sM sK: va_state) : va_state | val va_update_stack (sM sK: va_state) : va_state | let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 126,
"end_line": 196,
"start_col": 19,
"start_line": 196
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state = | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Decls.va_state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_stack",
"Vale.PPC64LE.Stack_Sems.stack_from_s",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stack"
] | [] | false | false | false | true | false | let va_update_stack (sM sK: va_state) : va_state =
| va_upd_stack (VSS.stack_from_s sM.ms_stack) sK | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_update_operand_heaplet | val va_update_operand_heaplet (h: heaplet_id) (sM sK: va_state) : va_state | val va_update_operand_heaplet (h: heaplet_id) (sM sK: va_state) : va_state | let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 31,
"end_line": 213,
"start_col": 0,
"start_line": 212
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h: Vale.PPC64LE.Decls.heaplet_id ->
sM: Vale.PPC64LE.Decls.va_state ->
sK: Vale.PPC64LE.Decls.va_state
-> Vale.PPC64LE.Decls.va_state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.heaplet_id",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_update_mem_heaplet"
] | [] | false | false | false | true | false | let va_update_operand_heaplet (h: heaplet_id) (sM sK: va_state) : va_state =
| va_update_mem_heaplet h sM sK | false |
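[Editor's aside; illustration only, not part of the dataset record above.] va_update_operand_heaplet forwards to va_update_mem_heaplet, which replaces heaplet h of sK with the corresponding heaplet taken from sM. A sketch of the unfolding, assuming the definitions quoted in the record's file_context (the name below is hypothetical):

let va_update_operand_heaplet_unfolded (h:heaplet_id) (sM sK:va_state) : va_state =
  // select heaplet h from sM's sixteen heaplets and install it into sK
  va_upd_mem_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK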
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_value_vec_opr | val va_value_vec_opr : Prims.eqtype | let va_value_vec_opr = quad32 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 36,
"end_line": 216,
"start_col": 7,
"start_line": 216
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.eqtype | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.quad32"
] | [] | false | false | false | true | false | let va_value_vec_opr =
| quad32 | false |
|
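[Editor's aside; illustration only, not part of the dataset record above.] The va_value_* aliases give the value type carried by each operand kind; the record above fixes vector operands to quad32. For comparison, from the same interface as quoted in the file_context:

// unfold let va_value_reg_opr = nat64    (* value of a 64-bit general-purpose register operand *)
// unfold let va_value_vec_opr = quad32   (* value of a 128-bit vector operand, four 32-bit words *)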
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_get_mem_layout | val va_get_mem_layout (s: va_state) : vale_heap_layout | val va_get_mem_layout (s: va_state) : vale_heap_layout | let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 103,
"end_line": 149,
"start_col": 19,
"start_line": 149
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.Decls.va_state -> Vale.Arch.HeapImpl.vale_heap_layout | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.Arch.HeapImpl.__proj__Mkvale_full_heap__item__vf_layout",
"Vale.PPC64LE.Decls.coerce",
"Vale.Arch.HeapImpl.vale_full_heap",
"Vale.Arch.Heap.heap_impl",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.Arch.HeapImpl.vale_heap_layout"
] | [] | false | false | false | true | false | let va_get_mem_layout (s: va_state) : vale_heap_layout =
| (coerce s.ms_heap).vf_layout | false |
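[Editor's aside; illustration only, not part of the dataset record above.] va_get_mem_layout projects the vf_layout field out of the full heap attached to the state; its write-side counterpart, va_upd_mem_layout, appears in the record's file_context. Shown together for contrast (paraphrasing that context), so that writing a layout and then reading it back returns the layout that was written:

// getter: va_get_mem_layout s   == (coerce s.ms_heap).vf_layout
// setter: va_upd_mem_layout l s == { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = l }) }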
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_value_heaplet | val va_value_heaplet : Type | let va_value_heaplet = vale_heap | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 39,
"end_line": 217,
"start_col": 7,
"start_line": 217
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64 | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.vale_heap"
] | [] | false | false | false | true | true | let va_value_heaplet =
| vale_heap | false |
|
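[Editor's aside; illustration only, not part of the dataset record above.] A heaplet operand is an index (va_operand_heaplet = heaplet_id), while its value is the vale_heap that index selects (va_value_heaplet = vale_heap); evaluation goes through the getter quoted in the file_context. A minimal sketch, assuming those definitions (the name eval_heaplet_sketch is hypothetical):

let eval_heaplet_sketch (s:va_state) (h:heaplet_id) : vale_heap =
  // va_eval_heaplet s h unfolds to selecting heaplet h from the state's sixteen heaplets
  Map16.sel (coerce s.ms_heap).vf_heaplets h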
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_update_operand_Mem64 | val va_update_operand_Mem64 (m: maddr) (sM sK: va_state) : va_state | val va_update_operand_Mem64 (m: maddr) (sM sK: va_state) : va_state | let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 21,
"end_line": 205,
"start_col": 0,
"start_line": 204
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
m: Vale.PPC64LE.Machine_s.maddr ->
sM: Vale.PPC64LE.Decls.va_state ->
sK: Vale.PPC64LE.Decls.va_state
-> Vale.PPC64LE.Decls.va_state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.maddr",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_update_mem"
] | [] | false | false | false | true | false | let va_update_operand_Mem64 (m: maddr) (sM sK: va_state) : va_state =
| va_update_mem sM sK | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_get_mem | val va_get_mem (s: va_state) : vale_heap | val va_get_mem (s: va_state) : vale_heap | let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap) | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 95,
"end_line": 148,
"start_col": 19,
"start_line": 148
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Decls.vale_heap | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Memory.get_vale_heap",
"Vale.PPC64LE.Decls.coerce",
"Vale.PPC64LE.Memory.vale_full_heap",
"Vale.Arch.Heap.heap_impl",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.PPC64LE.Decls.vale_heap"
] | [] | false | false | false | true | false | let va_get_mem (s: va_state) : vale_heap =
| M.get_vale_heap (coerce s.ms_heap) | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_update_operand_vec_opr | val va_update_operand_vec_opr (x: vec) (sM sK: va_state) : va_state | val va_update_operand_vec_opr (x: vec) (sM sK: va_state) : va_state | let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 23,
"end_line": 209,
"start_col": 0,
"start_line": 208
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Vale.PPC64LE.Machine_s.vec -> sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state
-> Vale.PPC64LE.Decls.va_state | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.vec",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_update_vec"
] | [] | false | false | false | true | false | let va_update_operand_vec_opr (x: vec) (sM sK: va_state) : va_state =
| va_update_vec x sM sK | false |
Vale.PPC64LE.Decls.fsti | Vale.PPC64LE.Decls.va_value_reg_opr | val va_value_reg_opr : Type0 | let va_value_reg_opr = nat64 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 35,
"end_line": 215,
"start_col": 7,
"start_line": 215
} | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK | {
"checked_file": "/",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Print_s",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type0 | Prims.Tot | [
"total"
] | [] | [
"Vale.PPC64LE.Machine_s.nat64"
] | [] | false | false | false | true | true | let va_value_reg_opr =
| nat64 | false |