effect (string, 48 distinct values) | original_source_type (string, 0-23k chars) | opens_and_abbrevs (list, 2-92 items) | isa_cross_project_example (bool, 1 class) | source_definition (string, 9-57.9k chars) | partial_definition (string, 7-23.3k chars) | is_div (bool, 2 classes) | is_type (null) | is_proof (bool, 2 classes) | completed_definiton (string, 1-250k chars) | dependencies (dict) | effect_flags (sequence, 0-2 items) | ideal_premises (sequence, 0-236 items) | mutual_with (sequence, 0-11 items) | file_context (string, 0-407k chars) | interleaved (bool, 1 class) | is_simply_typed (bool, 2 classes) | file_name (string, 5-48 chars) | vconfig (dict) | is_simple_lemma (null) | source_type (string, 10-23k chars) | proof_features (sequence, 0-1 items) | name (string, 8-95 chars) | source (dict) | verbose_type (string, 1-7.42k chars) | source_range (dict)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Prims.Tot | val va_update_reg (r: reg) (sM sK: va_state) : va_state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK | val va_update_reg (r: reg) (sM sK: va_state) : va_state
let va_update_reg (r: reg) (sM sK: va_state) : va_state = | false | null | false | va_upd_reg r (eval_reg r sM) sK | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.reg",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_reg",
"Vale.PPC64LE.State.eval_reg"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_update_reg (r: reg) (sM sK: va_state) : va_state | [] | Vale.PPC64LE.Decls.va_update_reg | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | r: Vale.PPC64LE.Machine_s.reg -> sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state
-> Vale.PPC64LE.Decls.va_state | {
"end_col": 33,
"end_line": 189,
"start_col": 2,
"start_line": 189
} |
Prims.Tot | val valid_buf_maddr64
(addr: int)
(s_mem: vale_heap)
(layout: vale_heap_layout)
(b: M.buffer64)
(index: int)
(t: taint)
: prop0 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index | val valid_buf_maddr64
(addr: int)
(s_mem: vale_heap)
(layout: vale_heap_layout)
(b: M.buffer64)
(index: int)
(t: taint)
: prop0
let valid_buf_maddr64
(addr: int)
(s_mem: vale_heap)
(layout: vale_heap_layout)
(b: M.buffer64)
(index: int)
(t: taint)
: prop0 = | false | null | false | valid_src_addr s_mem b index /\ M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.int",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Vale.PPC64LE.Memory.buffer64",
"Vale.Arch.HeapTypes_s.taint",
"Prims.l_and",
"Vale.PPC64LE.Decls.valid_src_addr",
"Vale.PPC64LE.Memory.vuint64",
"Vale.PPC64LE.Memory.valid_taint_buf64",
"Vale.Arch.HeapImpl.__proj__Mkvale_heap_layout__item__vl_taint",
"Prims.eq2",
"Prims.op_Addition",
"Vale.PPC64LE.Memory.buffer_addr",
"FStar.Mul.op_Star",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val valid_buf_maddr64
(addr: int)
(s_mem: vale_heap)
(layout: vale_heap_layout)
(b: M.buffer64)
(index: int)
(t: taint)
: prop0 | [] | Vale.PPC64LE.Decls.valid_buf_maddr64 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
addr: Prims.int ->
s_mem: Vale.PPC64LE.Decls.vale_heap ->
layout: Vale.Arch.HeapImpl.vale_heap_layout ->
b: Vale.PPC64LE.Memory.buffer64 ->
index: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint
-> Vale.Def.Prop_s.prop0 | {
"end_col": 43,
"end_line": 101,
"start_col": 2,
"start_line": 99
} |
Prims.Tot | val va_eval_heaplet (s: va_state) (h: heaplet_id) : vale_heap | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s | val va_eval_heaplet (s: va_state) (h: heaplet_id) : vale_heap
let va_eval_heaplet (s: va_state) (h: heaplet_id) : vale_heap = | false | null | false | va_get_mem_heaplet h s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.heaplet_id",
"Vale.PPC64LE.Decls.va_get_mem_heaplet",
"Vale.PPC64LE.Decls.vale_heap"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_eval_heaplet (s: va_state) (h: heaplet_id) : vale_heap | [] | Vale.PPC64LE.Decls.va_eval_heaplet | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.Decls.va_state -> h: Vale.PPC64LE.Decls.heaplet_id -> Vale.PPC64LE.Decls.vale_heap | {
"end_col": 103,
"end_line": 160,
"start_col": 81,
"start_line": 160
} |
Prims.Tot | val va_upd_ok (ok: bool) (s: state) : state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok } | val va_upd_ok (ok: bool) (s: state) : state
let va_upd_ok (ok: bool) (s: state) : state = | false | null | false | { s with ok = ok } | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Machine_s.Mkstate",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__regs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__vecs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__cr0",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__xer",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stack",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_upd_ok (ok: bool) (s: state) : state | [] | Vale.PPC64LE.Decls.va_upd_ok | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | ok: Prims.bool -> s: Vale.PPC64LE.State.state -> Vale.PPC64LE.State.state | {
"end_col": 72,
"end_line": 172,
"start_col": 58,
"start_line": 172
} |
Prims.GTot | val s128 (m: vale_heap) (b: M.buffer128) : GTot (Seq.seq quad32) | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b | val s128 (m: vale_heap) (b: M.buffer128) : GTot (Seq.seq quad32)
let s128 (m: vale_heap) (b: M.buffer128) : GTot (Seq.seq quad32) = | false | null | false | buffer128_as_seq m b | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer128",
"Vale.PPC64LE.Decls.buffer128_as_seq",
"FStar.Seq.Base.seq",
"Vale.PPC64LE.Machine_s.quad32"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val s128 (m: vale_heap) (b: M.buffer128) : GTot (Seq.seq quad32) | [] | Vale.PPC64LE.Decls.s128 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | m: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer128
-> Prims.GTot (FStar.Seq.Base.seq Vale.PPC64LE.Machine_s.quad32) | {
"end_col": 92,
"end_line": 87,
"start_col": 72,
"start_line": 87
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True | let va_is_dst_vec_opr (v: vec_opr) (s: va_state) = | false | null | false | True | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.vec_opr",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_True",
"Prims.logical"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_is_dst_vec_opr : v: Vale.PPC64LE.Decls.vec_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | [] | Vale.PPC64LE.Decls.va_is_dst_vec_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | v: Vale.PPC64LE.Decls.vec_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | {
"end_col": 72,
"end_line": 168,
"start_col": 68,
"start_line": 168
} |
|
Prims.Tot | val va_update_stackTaint (sM sK: va_state) : va_state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK | val va_update_stackTaint (sM sK: va_state) : va_state
let va_update_stackTaint (sM sK: va_state) : va_state = | false | null | false | va_upd_stackTaint sM.ms_stackTaint sK | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_stackTaint",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_update_stackTaint (sM sK: va_state) : va_state | [] | Vale.PPC64LE.Decls.va_update_stackTaint | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Decls.va_state | {
"end_col": 122,
"end_line": 197,
"start_col": 85,
"start_line": 197
} |
Prims.Tot | val va_upd_operand_reg_opr (r: reg_opr) (v: nat64) (s: state) : state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s | val va_upd_operand_reg_opr (r: reg_opr) (v: nat64) (s: state) : state
let va_upd_operand_reg_opr (r: reg_opr) (v: nat64) (s: state) : state = | false | null | false | va_upd_reg r v s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.reg_opr",
"Vale.PPC64LE.Machine_s.nat64",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Decls.va_upd_reg"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_upd_operand_reg_opr (r: reg_opr) (v: nat64) (s: state) : state | [] | Vale.PPC64LE.Decls.va_upd_operand_reg_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | r: Vale.PPC64LE.Decls.reg_opr -> v: Vale.PPC64LE.Machine_s.nat64 -> s: Vale.PPC64LE.State.state
-> Vale.PPC64LE.State.state | {
"end_col": 97,
"end_line": 219,
"start_col": 81,
"start_line": 219
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True | let va_is_dst_heaplet (h: heaplet_id) (s: va_state) = | false | null | false | True | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.heaplet_id",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_True",
"Prims.logical"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_is_dst_heaplet : h: Vale.PPC64LE.Decls.heaplet_id -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | [] | Vale.PPC64LE.Decls.va_is_dst_heaplet | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | h: Vale.PPC64LE.Decls.heaplet_id -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | {
"end_col": 75,
"end_line": 170,
"start_col": 71,
"start_line": 170
} |
|
Prims.Tot | val va_update_operand_reg_opr (r: reg) (sM sK: va_state) : va_state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK | val va_update_operand_reg_opr (r: reg) (sM sK: va_state) : va_state
let va_update_operand_reg_opr (r: reg) (sM sK: va_state) : va_state = | false | null | false | va_update_reg r sM sK | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.reg",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_update_reg"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
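// Note: the strides 8 * index and 16 * index above reflect the 8-byte elements of
// buffer64 and the 16-byte elements of buffer128, respectively.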
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
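// Illustrative note (not part of the original interface): the heaplet id h is not
// stored in the operand itself; only the address and the taint are, e.g.
//   va_opr_code_Mem64 h r 16 Secret == ({ address=r; offset=16 }, Secret)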
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
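// Illustrative reading (not part of the original interface): va_update_reg r sM sK
// is sK with register r overwritten by its value in sM; the remaining va_update_*
// combinators follow the same copy-one-component pattern.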
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_update_operand_reg_opr (r: reg) (sM sK: va_state) : va_state | [] | Vale.PPC64LE.Decls.va_update_operand_reg_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | r: Vale.PPC64LE.Machine_s.reg -> sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Decls.va_state | {
"end_col": 23,
"end_line": 201,
"start_col": 2,
"start_line": 201
} |
Prims.Tot | val va_update_cr0 (sM sK: va_state) : va_state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK | val va_update_cr0 (sM sK: va_state) : va_state
let va_update_cr0 (sM sK: va_state) : va_state = | false | null | false | va_upd_cr0 sM.cr0 sK | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_cr0",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__cr0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_update_cr0 (sM sK: va_state) : va_state | [] | Vale.PPC64LE.Decls.va_update_cr0 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Decls.va_state | {
"end_col": 98,
"end_line": 186,
"start_col": 78,
"start_line": 186
} |
Prims.Tot | val va_Block (block: va_codes) : va_code | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_Block (block:va_codes) : va_code = Block block | val va_Block (block: va_codes) : va_code
let va_Block (block: va_codes) : va_code = | false | null | false | Block block | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_codes",
"Vale.PPC64LE.Machine_s.Block",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.ocmp",
"Vale.PPC64LE.Decls.va_code"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl | false | true | Vale.PPC64LE.Decls.fsti | {
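// Illustrative example (not part of the original interface): instruction lists are
// built with va_CCons/va_CNil and wrapped by va_Block, e.g.
//   va_Block (va_CCons c1 (va_CCons c2 (va_CNil ()))) == Block [c1; c2]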
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_Block (block: va_codes) : va_code | [] | Vale.PPC64LE.Decls.va_Block | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | block: Vale.PPC64LE.Decls.va_codes -> Vale.PPC64LE.Decls.va_code | {
"end_col": 60,
"end_line": 238,
"start_col": 49,
"start_line": 238
} |
Prims.Tot | val va_update_stack (sM sK: va_state) : va_state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK | val va_update_stack (sM sK: va_state) : va_state
let va_update_stack (sM sK: va_state) : va_state = | false | null | false | va_upd_stack (VSS.stack_from_s sM.ms_stack) sK | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_stack",
"Vale.PPC64LE.Stack_Sems.stack_from_s",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stack"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state = | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_update_stack (sM sK: va_state) : va_state | [] | Vale.PPC64LE.Decls.va_update_stack | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Decls.va_state | {
"end_col": 126,
"end_line": 196,
"start_col": 80,
"start_line": 196
} |
Prims.Tot | val va_upd_cr0 (cr0: cr0_t) (s: state) : state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 } | val va_upd_cr0 (cr0: cr0_t) (s: state) : state
let va_upd_cr0 (cr0: cr0_t) (s: state) : state = | false | null | false | { s with cr0 = cr0 } | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.cr0_t",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Machine_s.Mkstate",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ok",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__regs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__vecs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__xer",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stack",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_upd_cr0 (cr0: cr0_t) (s: state) : state | [] | Vale.PPC64LE.Decls.va_upd_cr0 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | cr0: Vale.PPC64LE.Machine_s.cr0_t -> s: Vale.PPC64LE.State.state -> Vale.PPC64LE.State.state | {
"end_col": 77,
"end_line": 173,
"start_col": 61,
"start_line": 173
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_buffer_3 (b1 b2 b3:M.buffer64) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (M.loc_union (loc_buffer b2) (loc_buffer b3))) h1 h2 | let modifies_buffer_3 (b1 b2 b3: M.buffer64) (h1 h2: vale_heap) = | false | null | false | modifies_mem (M.loc_union (loc_buffer b1) (M.loc_union (loc_buffer b2) (loc_buffer b3))) h1 h2 | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Memory.buffer64",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Decls.modifies_mem",
"Vale.PPC64LE.Memory.loc_union",
"Vale.PPC64LE.Decls.loc_buffer",
"Vale.PPC64LE.Memory.vuint64",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
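// Illustrative example (editorial addition, not in the original file): chaining the
// va_update_* functions over a frame state sK builds "sK, except that the named
// pieces of state are taken from sM"; here register r and the xer flags.
let va_frame_reg_xer_example (r:reg) (sM sK:va_state) : va_state =
  va_update_reg r sM (va_update_xer sM sK)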
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp
unfold let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c
unfold let va_get_ifCond (c:va_code{IfElse? c}) : ocmp = IfElse?.ifCond c
unfold let va_get_ifTrue (c:va_code{IfElse? c}) : va_code = IfElse?.ifTrue c
unfold let va_get_ifFalse (c:va_code{IfElse? c}) : va_code = IfElse?.ifFalse c
unfold let va_get_whileCond (c:va_code{While? c}) : ocmp = While?.whileCond c
unfold let va_get_whileBody (c:va_code{While? c}) : va_code = While?.whileBody c
// Map syntax
// syntax for map accesses, m.[key] and m.[key] <- value
type map (key:eqtype) (value:Type) = Map.t key value
let (.[]) = Map.sel
let (.[]<-) = Map.upd
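// Illustrative example (editorial addition, not in the original file): with these two
// operators, a functional map update followed by a lookup reads like ordinary indexing.
let map_syntax_example (m:map int int) : int =
  let m' = m.[0] <- 42 in
  m'.[0]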
(** Memory framing **)
(*
unfold let in_mem (addr:int) (m:mem) : bool = m `Map.contains` addr
let disjoint (ptr1:int) (num_bytes1:int) (ptr2:int) (num_bytes2:int) =
ptr1 + num_bytes1 <= ptr2 \/ ptr2 + num_bytes2 <= ptr1
let validSrcAddrs (mem:mem) (addr:int) (size:int) (num_bytes:int) =
size == 64 /\
(forall (a:int) . {:pattern (mem `Map.contains` a)} addr <= a && a < addr+num_bytes && (a - addr) % 8 = 0 ==> mem `Map.contains` a)
let memModified (old_mem:mem) (new_mem:mem) (ptr:int) (num_bytes) =
(forall (a:int) . {:pattern (new_mem `Map.contains` a)} old_mem `Map.contains` a <==> new_mem `Map.contains` a) /\
(forall (a:int) . {:pattern (new_mem.[a]) \/ Map.sel new_mem a} a < ptr || a >= ptr + num_bytes ==> old_mem.[a] == new_mem.[ a])
*)
(** Convenient memory-related functions **)
let rec buffers_readable (h: vale_heap) (l: list M.buffer64) : GTot prop0 (decreases l) =
match l with
| [] -> True
| b :: l' -> buffer_readable h b /\ buffers_readable h l'
unfold let modifies_buffer (b:M.buffer64) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2
unfold let modifies_buffer_2 (b1 b2:M.buffer64) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2 | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_buffer_3 : b1: Vale.PPC64LE.Memory.buffer64 ->
b2: Vale.PPC64LE.Memory.buffer64 ->
b3: Vale.PPC64LE.Memory.buffer64 ->
h1: Vale.PPC64LE.Decls.vale_heap ->
h2: Vale.PPC64LE.Decls.vale_heap
-> Vale.Def.Prop_s.prop0 | [] | Vale.PPC64LE.Decls.modifies_buffer_3 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b1: Vale.PPC64LE.Memory.buffer64 ->
b2: Vale.PPC64LE.Memory.buffer64 ->
b3: Vale.PPC64LE.Memory.buffer64 ->
h1: Vale.PPC64LE.Decls.vale_heap ->
h2: Vale.PPC64LE.Decls.vale_heap
-> Vale.Def.Prop_s.prop0 | {
"end_col": 96,
"end_line": 289,
"start_col": 2,
"start_line": 289
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buffers_disjoint (b1 b2:M.buffer64) =
locs_disjoint [loc_buffer b1; loc_buffer b2] | let buffers_disjoint (b1 b2: M.buffer64) = | false | null | false | locs_disjoint [loc_buffer b1; loc_buffer b2] | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Memory.buffer64",
"Vale.PPC64LE.Decls.locs_disjoint",
"Prims.Cons",
"Vale.PPC64LE.Memory.loc",
"Vale.PPC64LE.Decls.loc_buffer",
"Vale.PPC64LE.Memory.vuint64",
"Prims.Nil",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp
unfold let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c
unfold let va_get_ifCond (c:va_code{IfElse? c}) : ocmp = IfElse?.ifCond c
unfold let va_get_ifTrue (c:va_code{IfElse? c}) : va_code = IfElse?.ifTrue c
unfold let va_get_ifFalse (c:va_code{IfElse? c}) : va_code = IfElse?.ifFalse c
unfold let va_get_whileCond (c:va_code{While? c}) : ocmp = While?.whileCond c
unfold let va_get_whileBody (c:va_code{While? c}) : va_code = While?.whileBody c
// Map syntax
// syntax for map accesses, m.[key] and m.[key] <- value
type map (key:eqtype) (value:Type) = Map.t key value
let (.[]) = Map.sel
let (.[]<-) = Map.upd
(** Memory framing **)
(*
unfold let in_mem (addr:int) (m:mem) : bool = m `Map.contains` addr
let disjoint (ptr1:int) (num_bytes1:int) (ptr2:int) (num_bytes2:int) =
ptr1 + num_bytes1 <= ptr2 \/ ptr2 + num_bytes2 <= ptr1
let validSrcAddrs (mem:mem) (addr:int) (size:int) (num_bytes:int) =
size == 64 /\
(forall (a:int) . {:pattern (mem `Map.contains` a)} addr <= a && a < addr+num_bytes && (a - addr) % 8 = 0 ==> mem `Map.contains` a)
let memModified (old_mem:mem) (new_mem:mem) (ptr:int) (num_bytes) =
(forall (a:int) . {:pattern (new_mem `Map.contains` a)} old_mem `Map.contains` a <==> new_mem `Map.contains` a) /\
(forall (a:int) . {:pattern (new_mem.[a]) \/ Map.sel new_mem a} a < ptr || a >= ptr + num_bytes ==> old_mem.[a] == new_mem.[ a])
*)
(** Convenient memory-related functions **)
let rec buffers_readable (h: vale_heap) (l: list M.buffer64) : GTot prop0 (decreases l) =
match l with
| [] -> True
| b :: l' -> buffer_readable h b /\ buffers_readable h l'
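// Illustrative example (editorial addition, not in the original file): readability of
// a list of buffers is just the conjunction of buffer_readable over its members,
// e.g. for a pair of buffers:
let buffers_readable_pair (h:vale_heap) (b1 b2:M.buffer64) : GTot prop0 =
  buffers_readable h [b1; b2]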
unfold let modifies_buffer (b:M.buffer64) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2
unfold let modifies_buffer_2 (b1 b2:M.buffer64) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2
unfold let modifies_buffer_3 (b1 b2 b3:M.buffer64) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (M.loc_union (loc_buffer b2) (loc_buffer b3))) h1 h2
unfold let modifies_buffer128 (b:M.buffer128) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2
unfold let modifies_buffer128_2 (b1 b2:M.buffer128) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2
unfold let modifies_buffer128_3 (b1 b2 b3:M.buffer128) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (M.loc_union (loc_buffer b2) (loc_buffer b3))) h1 h2
let validSrcAddrs (#t:base_typ) (h:vale_heap) (addr:int) (b:M.buffer t) (len:int) (layout:vale_heap_layout) (tn:taint) =
buffer_readable h b /\
len <= buffer_length b /\
M.buffer_addr b h == addr /\
M.valid_layout_buffer_id t b layout (M.get_heaplet_id h) false /\
M.valid_taint_buf b h layout.vl_taint tn
let validDstAddrs (#t:base_typ) (h:vale_heap) (addr:int) (b:M.buffer t) (len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h addr b len layout tn /\
M.valid_layout_buffer_id t b layout (M.get_heaplet_id h) true /\
buffer_writeable b
let validSrcAddrs64 (h:vale_heap) (addr:int) (b:M.buffer64) (len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h addr b len layout tn
let validDstAddrs64 (h:vale_heap) (addr:int) (b:M.buffer64) (len:int) (layout:vale_heap_layout) (tn:taint) =
validDstAddrs h addr b len layout tn
let validSrcAddrs128 (h:vale_heap) (addr:int) (b:M.buffer128) (len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h addr b len layout tn
let validDstAddrs128 (h:vale_heap) (addr:int) (b:M.buffer128) (len:int) (layout:vale_heap_layout) (tn:taint) =
validDstAddrs h addr b len layout tn
let validSrcAddrsOffset128 (h:vale_heap) (addr:int) (b:M.buffer128) (offset len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h (addr - 16 * offset) b (len + offset) layout tn
let validDstAddrsOffset128 (h:vale_heap) (addr:int) (b:M.buffer128) (offset len:int) (layout:vale_heap_layout) (tn:taint) =
validDstAddrs h (addr - 16 * offset) b (len + offset) layout tn
let modifies_buffer_specific128 (b:M.buffer128) (h1 h2:vale_heap) (start last:nat) : GTot prop0 =
modifies_buffer128 b h1 h2 /\
// TODO: Consider replacing this with: modifies (loc_buffer (gsub_buffer b i len)) h1 h2
(forall (i:nat) . {:pattern (Seq.index (M.buffer_as_seq h2 b) i)}
0 <= i /\ i < buffer_length b
/\ (i < start || i > last)
==> buffer128_read b i h1
== buffer128_read b i h2)
let buffer_modifies_specific128 (b:M.buffer128) (h1 h2:vale_heap) (start last:nat) : GTot prop0 =
// TODO: Consider replacing this with: modifies (loc_buffer (gsub_buffer b i len)) h1 h2
(forall (i:nat) . {:pattern (Seq.index (M.buffer_as_seq h2 b) i)}
0 <= i /\ i < buffer_length b
/\ (i < start || i > last)
==> buffer128_read b i h1
== buffer128_read b i h2)
let modifies_buffer_specific (b:M.buffer64) (h1 h2:vale_heap) (start last:nat) : GTot prop0 =
modifies_buffer b h1 h2 /\
// TODO: Consider replacing this with: modifies (loc_buffer (gsub_buffer b i len)) h1 h2
(forall (i:nat) . {:pattern (Seq.index (M.buffer_as_seq h2 b) i)}
0 <= i /\ i < buffer_length b
/\ (i < start || i > last)
==> buffer64_read b i h1
== buffer64_read b i h2) | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buffers_disjoint : b1: Vale.PPC64LE.Memory.buffer64 -> b2: Vale.PPC64LE.Memory.buffer64 -> Vale.Def.Prop_s.prop0 | [] | Vale.PPC64LE.Decls.buffers_disjoint | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b1: Vale.PPC64LE.Memory.buffer64 -> b2: Vale.PPC64LE.Memory.buffer64 -> Vale.Def.Prop_s.prop0 | {
"end_col": 48,
"end_line": 353,
"start_col": 4,
"start_line": 353
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buffers3_disjoint128 (b1 b2 b3:M.buffer128) =
locs_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3] | let buffers3_disjoint128 (b1 b2 b3: M.buffer128) = | false | null | false | locs_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3] | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Memory.buffer128",
"Vale.PPC64LE.Decls.locs_disjoint",
"Prims.Cons",
"Vale.PPC64LE.Memory.loc",
"Vale.PPC64LE.Decls.loc_buffer",
"Vale.PPC64LE.Memory.vuint128",
"Prims.Nil",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
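// Illustrative example (editorial addition, not in the original file): a 64-bit memory
// operand 16 bytes past the address held in register r, carrying taint t; note that the
// heaplet argument is not recorded in the resulting tmaddr pair.
let va_opr_code_Mem64_example (h:heaplet_id) (r:reg) (t:taint) : tmaddr =
  va_opr_code_Mem64 h r 16 t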
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
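// Illustrative sketch (not part of the original interface): va_update_foo sM sK
// rebuilds sK with the foo component taken from sM, so a "modifies r3 and xer"
// postcondition is typically written as a nested chain, e.g.
(*
   sM == va_update_reg 3 sM (va_update_xer sM sK)
*)
// which forces sM and sK to agree on every component other than r3 and xer.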
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
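// Illustrative sketch (not part of the original interface): straight-line code
// is built by consing instructions into a block, e.g. for hypothetical
// instructions i1 and i2 of type va_code:
(*
   va_Block (va_CCons i1 (va_CCons i2 (va_CNil ())))
*)
// while va_IfElse and va_While take an ocmp built with the va_cmp_* constructors below.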
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp
unfold let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c
unfold let va_get_ifCond (c:va_code{IfElse? c}) : ocmp = IfElse?.ifCond c
unfold let va_get_ifTrue (c:va_code{IfElse? c}) : va_code = IfElse?.ifTrue c
unfold let va_get_ifFalse (c:va_code{IfElse? c}) : va_code = IfElse?.ifFalse c
unfold let va_get_whileCond (c:va_code{While? c}) : ocmp = While?.whileCond c
unfold let va_get_whileBody (c:va_code{While? c}) : va_code = While?.whileBody c
// Map syntax
// syntax for map accesses, m.[key] and m.[key] <- value
type map (key:eqtype) (value:Type) = Map.t key value
let (.[]) = Map.sel
let (.[]<-) = Map.upd
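// Illustrative sketch (not part of the original interface): with these
// operators, reads and updates of an FStar.Map can be written as
(*
   let v  = m.[k] in          // Map.sel m k
   let m' = m.[k] <- v + 1 in // Map.upd m k (v + 1)
   ...
*)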
(** Memory framing **)
(*
unfold let in_mem (addr:int) (m:mem) : bool = m `Map.contains` addr
let disjoint (ptr1:int) (num_bytes1:int) (ptr2:int) (num_bytes2:int) =
ptr1 + num_bytes1 <= ptr2 \/ ptr2 + num_bytes2 <= ptr1
let validSrcAddrs (mem:mem) (addr:int) (size:int) (num_bytes:int) =
size == 64 /\
(forall (a:int) . {:pattern (mem `Map.contains` a)} addr <= a && a < addr+num_bytes && (a - addr) % 8 = 0 ==> mem `Map.contains` a)
let memModified (old_mem:mem) (new_mem:mem) (ptr:int) (num_bytes) =
(forall (a:int) . {:pattern (new_mem `Map.contains` a)} old_mem `Map.contains` a <==> new_mem `Map.contains` a) /\
(forall (a:int) . {:pattern (new_mem.[a]) \/ Map.sel new_mem a} a < ptr || a >= ptr + num_bytes ==> old_mem.[a] == new_mem.[ a])
*)
(** Convenient memory-related functions **)
let rec buffers_readable (h: vale_heap) (l: list M.buffer64) : GTot prop0 (decreases l) =
match l with
| [] -> True
| b :: l' -> buffer_readable h b /\ buffers_readable h l'
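// Illustrative sketch (not part of the original interface): on a concrete list
// the recursion unfolds to a finite conjunction, e.g.
(*
   buffers_readable h [b1; b2] <==>
     (buffer_readable h b1 /\ (buffer_readable h b2 /\ True))
*)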
unfold let modifies_buffer (b:M.buffer64) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2
unfold let modifies_buffer_2 (b1 b2:M.buffer64) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2
unfold let modifies_buffer_3 (b1 b2 b3:M.buffer64) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (M.loc_union (loc_buffer b2) (loc_buffer b3))) h1 h2
unfold let modifies_buffer128 (b:M.buffer128) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2
unfold let modifies_buffer128_2 (b1 b2:M.buffer128) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2
unfold let modifies_buffer128_3 (b1 b2 b3:M.buffer128) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (M.loc_union (loc_buffer b2) (loc_buffer b3))) h1 h2
let validSrcAddrs (#t:base_typ) (h:vale_heap) (addr:int) (b:M.buffer t) (len:int) (layout:vale_heap_layout) (tn:taint) =
buffer_readable h b /\
len <= buffer_length b /\
M.buffer_addr b h == addr /\
M.valid_layout_buffer_id t b layout (M.get_heaplet_id h) false /\
M.valid_taint_buf b h layout.vl_taint tn
let validDstAddrs (#t:base_typ) (h:vale_heap) (addr:int) (b:M.buffer t) (len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h addr b len layout tn /\
M.valid_layout_buffer_id t b layout (M.get_heaplet_id h) true /\
buffer_writeable b
let validSrcAddrs64 (h:vale_heap) (addr:int) (b:M.buffer64) (len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h addr b len layout tn
let validDstAddrs64 (h:vale_heap) (addr:int) (b:M.buffer64) (len:int) (layout:vale_heap_layout) (tn:taint) =
validDstAddrs h addr b len layout tn
let validSrcAddrs128 (h:vale_heap) (addr:int) (b:M.buffer128) (len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h addr b len layout tn
let validDstAddrs128 (h:vale_heap) (addr:int) (b:M.buffer128) (len:int) (layout:vale_heap_layout) (tn:taint) =
validDstAddrs h addr b len layout tn
let validSrcAddrsOffset128 (h:vale_heap) (addr:int) (b:M.buffer128) (offset len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h (addr - 16 * offset) b (len + offset) layout tn
let validDstAddrsOffset128 (h:vale_heap) (addr:int) (b:M.buffer128) (offset len:int) (layout:vale_heap_layout) (tn:taint) =
validDstAddrs h (addr - 16 * offset) b (len + offset) layout tn
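// Illustrative note (not part of the original interface): the Offset128 forms
// re-base an interior pointer. With offset = 1, they require
// addr == M.buffer_addr b h + 16 (addr points at element 1 of b) and check the
// buffer from its true base with length len + 1.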
let modifies_buffer_specific128 (b:M.buffer128) (h1 h2:vale_heap) (start last:nat) : GTot prop0 =
modifies_buffer128 b h1 h2 /\
// TODO: Consider replacing this with: modifies (loc_buffer (gsub_buffer b i len)) h1 h2
(forall (i:nat) . {:pattern (Seq.index (M.buffer_as_seq h2 b) i)}
0 <= i /\ i < buffer_length b
/\ (i < start || i > last)
==> buffer128_read b i h1
== buffer128_read b i h2)
let buffer_modifies_specific128 (b:M.buffer128) (h1 h2:vale_heap) (start last:nat) : GTot prop0 =
// TODO: Consider replacing this with: modifies (loc_buffer (gsub_buffer b i len)) h1 h2
(forall (i:nat) . {:pattern (Seq.index (M.buffer_as_seq h2 b) i)}
0 <= i /\ i < buffer_length b
/\ (i < start || i > last)
==> buffer128_read b i h1
== buffer128_read b i h2)
let modifies_buffer_specific (b:M.buffer64) (h1 h2:vale_heap) (start last:nat) : GTot prop0 =
modifies_buffer b h1 h2 /\
// TODO: Consider replacing this with: modifies (loc_buffer (gsub_buffer b i len)) h1 h2
(forall (i:nat) . {:pattern (Seq.index (M.buffer_as_seq h2 b) i)}
0 <= i /\ i < buffer_length b
/\ (i < start || i > last)
==> buffer64_read b i h1
== buffer64_read b i h2)
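// Illustrative note (not part of the original interface): only indices in
// [start, last] may change; every other valid index must read back equal, e.g.
(*
   modifies_buffer_specific b h1 h2 2 3 /\ 0 < buffer_length b ==>
     buffer64_read b 0 h1 == buffer64_read b 0 h2
*)
// The 128-bit variants above follow the same shape with buffer128_read.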
unfold let buffers_disjoint (b1 b2:M.buffer64) =
locs_disjoint [loc_buffer b1; loc_buffer b2]
unfold let buffers_disjoint128 (b1 b2:M.buffer128) =
locs_disjoint [loc_buffer b1; loc_buffer b2]
let rec loc_locs_disjoint_rec128 (l:M.buffer128) (ls:list (M.buffer128)) : prop0 =
match ls with
| [] -> True
| h::t -> locs_disjoint [loc_buffer l; loc_buffer h] /\ loc_locs_disjoint_rec128 l t
unfold
let buffer_disjoints128 (l:M.buffer128) (ls:list (M.buffer128)) : prop0 =
norm [zeta; iota; delta_only [`%loc_locs_disjoint_rec128]] (loc_locs_disjoint_rec128 l ls)
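// Illustrative sketch (not part of the original interface): the norm call
// unfolds the recursion on a concrete list during normalization, e.g.
(*
   buffer_disjoints128 b [b1; b2]
   (* reduces to *)
   locs_disjoint [loc_buffer b; loc_buffer b1] /\
   (locs_disjoint [loc_buffer b; loc_buffer b2] /\ True)
*)
// buffer_disjoints64_128 below behaves the same way for a 64-bit buffer
// against a list of 128-bit buffers.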
let rec loc_locs_disjoint_rec64_128 (l:M.buffer64) (ls:list (M.buffer128)) : prop0 =
match ls with
| [] -> True
| h::t -> locs_disjoint [loc_buffer l; loc_buffer h] /\ loc_locs_disjoint_rec64_128 l t
unfold
let buffer_disjoints64_128 (l:M.buffer64) (ls:list (M.buffer128)) : prop0 =
norm [zeta; iota; delta_only [`%loc_locs_disjoint_rec64_128]] (loc_locs_disjoint_rec64_128 l ls) | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buffers3_disjoint128 : b1: Vale.PPC64LE.Memory.buffer128 ->
b2: Vale.PPC64LE.Memory.buffer128 ->
b3: Vale.PPC64LE.Memory.buffer128
-> Vale.Def.Prop_s.prop0 | [] | Vale.PPC64LE.Decls.buffers3_disjoint128 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b1: Vale.PPC64LE.Memory.buffer128 ->
b2: Vale.PPC64LE.Memory.buffer128 ->
b3: Vale.PPC64LE.Memory.buffer128
-> Vale.Def.Prop_s.prop0 | {
"end_col": 63,
"end_line": 377,
"start_col": 4,
"start_line": 377
} |
|
Prims.Tot | val va_whileInv_total (b: ocmp) (c: va_code) (s0 sN: va_state) (f0: va_fuel) : prop0 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_whileInv_total (b:ocmp) (c:va_code) (s0:va_state) (sN:va_state) (f0:va_fuel) : prop0 =
eval_while_inv (While b c) s0 f0 sN /\ state_inv s0 | val va_whileInv_total (b: ocmp) (c: va_code) (s0 sN: va_state) (f0: va_fuel) : prop0
let va_whileInv_total (b: ocmp) (c: va_code) (s0 sN: va_state) (f0: va_fuel) : prop0 = | false | null | false | eval_while_inv (While b c) s0 f0 sN /\ state_inv s0 | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.ocmp",
"Vale.PPC64LE.Decls.va_code",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_fuel",
"Prims.l_and",
"Vale.PPC64LE.Decls.eval_while_inv",
"Vale.PPC64LE.Machine_s.While",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.state_inv",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
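// Illustrative sketch (not part of the original interface): both branches are
// passed as thunks whose unit argument carries the refined condition, e.g.
(*
   va_if (n > 0) (fun _ -> n - 1) (fun _ -> 0)
*)
// where n is a hypothetical int; the true branch may assume n > 0.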
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer (#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp
unfold let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c
unfold let va_get_ifCond (c:va_code{IfElse? c}) : ocmp = IfElse?.ifCond c
unfold let va_get_ifTrue (c:va_code{IfElse? c}) : va_code = IfElse?.ifTrue c
unfold let va_get_ifFalse (c:va_code{IfElse? c}) : va_code = IfElse?.ifFalse c
unfold let va_get_whileCond (c:va_code{While? c}) : ocmp = While?.whileCond c
unfold let va_get_whileBody (c:va_code{While? c}) : va_code = While?.whileBody c
// Map syntax
// syntax for map accesses, m.[key] and m.[key] <- value
type map (key:eqtype) (value:Type) = Map.t key value
let (.[]) = Map.sel
let (.[]<-) = Map.upd
(** Memory framing **)
(*
unfold let in_mem (addr:int) (m:mem) : bool = m `Map.contains` addr
let disjoint (ptr1:int) (num_bytes1:int) (ptr2:int) (num_bytes2:int) =
ptr1 + num_bytes1 <= ptr2 \/ ptr2 + num_bytes2 <= ptr1
let validSrcAddrs (mem:mem) (addr:int) (size:int) (num_bytes:int) =
size == 64 /\
(forall (a:int) . {:pattern (mem `Map.contains` a)} addr <= a && a < addr+num_bytes && (a - addr) % 8 = 0 ==> mem `Map.contains` a)
let memModified (old_mem:mem) (new_mem:mem) (ptr:int) (num_bytes) =
(forall (a:int) . {:pattern (new_mem `Map.contains` a)} old_mem `Map.contains` a <==> new_mem `Map.contains` a) /\
(forall (a:int) . {:pattern (new_mem.[a]) \/ Map.sel new_mem a} a < ptr || a >= ptr + num_bytes ==> old_mem.[a] == new_mem.[ a])
*)
(** Convenient memory-related functions **)
let rec buffers_readable (h: vale_heap) (l: list M.buffer64) : GTot prop0 (decreases l) =
match l with
| [] -> True
| b :: l' -> buffer_readable h b /\ buffers_readable h l'
unfold let modifies_buffer (b:M.buffer64) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2
unfold let modifies_buffer_2 (b1 b2:M.buffer64) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2
unfold let modifies_buffer_3 (b1 b2 b3:M.buffer64) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (M.loc_union (loc_buffer b2) (loc_buffer b3))) h1 h2
unfold let modifies_buffer128 (b:M.buffer128) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2
unfold let modifies_buffer128_2 (b1 b2:M.buffer128) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2
unfold let modifies_buffer128_3 (b1 b2 b3:M.buffer128) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (M.loc_union (loc_buffer b2) (loc_buffer b3))) h1 h2
let validSrcAddrs (#t:base_typ) (h:vale_heap) (addr:int) (b:M.buffer t) (len:int) (layout:vale_heap_layout) (tn:taint) =
buffer_readable h b /\
len <= buffer_length b /\
M.buffer_addr b h == addr /\
M.valid_layout_buffer_id t b layout (M.get_heaplet_id h) false /\
M.valid_taint_buf b h layout.vl_taint tn
let validDstAddrs (#t:base_typ) (h:vale_heap) (addr:int) (b:M.buffer t) (len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h addr b len layout tn /\
M.valid_layout_buffer_id t b layout (M.get_heaplet_id h) true /\
buffer_writeable b
let validSrcAddrs64 (h:vale_heap) (addr:int) (b:M.buffer64) (len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h addr b len layout tn
let validDstAddrs64 (h:vale_heap) (addr:int) (b:M.buffer64) (len:int) (layout:vale_heap_layout) (tn:taint) =
validDstAddrs h addr b len layout tn
let validSrcAddrs128 (h:vale_heap) (addr:int) (b:M.buffer128) (len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h addr b len layout tn
let validDstAddrs128 (h:vale_heap) (addr:int) (b:M.buffer128) (len:int) (layout:vale_heap_layout) (tn:taint) =
validDstAddrs h addr b len layout tn
let validSrcAddrsOffset128 (h:vale_heap) (addr:int) (b:M.buffer128) (offset len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h (addr - 16 * offset) b (len + offset) layout tn
let validDstAddrsOffset128 (h:vale_heap) (addr:int) (b:M.buffer128) (offset len:int) (layout:vale_heap_layout) (tn:taint) =
validDstAddrs h (addr - 16 * offset) b (len + offset) layout tn
let modifies_buffer_specific128 (b:M.buffer128) (h1 h2:vale_heap) (start last:nat) : GTot prop0 =
modifies_buffer128 b h1 h2 /\
// TODO: Consider replacing this with: modifies (loc_buffer (gsub_buffer b i len)) h1 h2
(forall (i:nat) . {:pattern (Seq.index (M.buffer_as_seq h2 b) i)}
0 <= i /\ i < buffer_length b
/\ (i < start || i > last)
==> buffer128_read b i h1
== buffer128_read b i h2)
let buffer_modifies_specific128 (b:M.buffer128) (h1 h2:vale_heap) (start last:nat) : GTot prop0 =
// TODO: Consider replacing this with: modifies (loc_buffer (gsub_buffer b i len)) h1 h2
(forall (i:nat) . {:pattern (Seq.index (M.buffer_as_seq h2 b) i)}
0 <= i /\ i < buffer_length b
/\ (i < start || i > last)
==> buffer128_read b i h1
== buffer128_read b i h2)
let modifies_buffer_specific (b:M.buffer64) (h1 h2:vale_heap) (start last:nat) : GTot prop0 =
modifies_buffer b h1 h2 /\
// TODO: Consider replacing this with: modifies (loc_buffer (gsub_buffer b i len)) h1 h2
(forall (i:nat) . {:pattern (Seq.index (M.buffer_as_seq h2 b) i)}
0 <= i /\ i < buffer_length b
/\ (i < start || i > last)
==> buffer64_read b i h1
== buffer64_read b i h2)
unfold let buffers_disjoint (b1 b2:M.buffer64) =
locs_disjoint [loc_buffer b1; loc_buffer b2]
unfold let buffers_disjoint128 (b1 b2:M.buffer128) =
locs_disjoint [loc_buffer b1; loc_buffer b2]
let rec loc_locs_disjoint_rec128 (l:M.buffer128) (ls:list (M.buffer128)) : prop0 =
match ls with
| [] -> True
| h::t -> locs_disjoint [loc_buffer l; loc_buffer h] /\ loc_locs_disjoint_rec128 l t
unfold
let buffer_disjoints128 (l:M.buffer128) (ls:list (M.buffer128)) : prop0 =
norm [zeta; iota; delta_only [`%loc_locs_disjoint_rec128]] (loc_locs_disjoint_rec128 l ls)
let rec loc_locs_disjoint_rec64_128 (l:M.buffer64) (ls:list (M.buffer128)) : prop0 =
match ls with
| [] -> True
| h::t -> locs_disjoint [loc_buffer l; loc_buffer h] /\ loc_locs_disjoint_rec64_128 l t
unfold
let buffer_disjoints64_128 (l:M.buffer64) (ls:list (M.buffer128)) : prop0 =
norm [zeta; iota; delta_only [`%loc_locs_disjoint_rec64_128]] (loc_locs_disjoint_rec64_128 l ls)
unfold let buffers3_disjoint128 (b1 b2 b3:M.buffer128) =
locs_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
val eval_code (c:va_code) (s0:va_state) (f0:va_fuel) (sN:va_state) : prop0
val eval_while_inv (c:va_code) (s0:va_state) (fW:va_fuel) (sW:va_state) : prop0
[@va_qattr]
let va_state_eq (s0:va_state) (s1:va_state) : prop0 = state_eq s0 s1
let state_inv (s:state) : prop0 = M.mem_inv (coerce s.ms_heap)
let va_require_total (c0:va_code) (c1:va_code) (s0:va_state) : prop0 =
c0 == c1 /\ state_inv s0
let va_ensure_total (c0:va_code) (s0:va_state) (s1:va_state) (f1:va_fuel) : prop0 =
eval_code c0 s0 f1 s1 /\ state_inv s1
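// Illustrative sketch (not part of the original interface): Vale-generated
// lemmas are phrased with these two predicates, roughly
(*
   val va_lemma_Example : va_b0:va_code -> va_s0:va_state -> ... ->
     Ghost (va_state & va_fuel)
       (requires va_require_total va_b0 (va_code_Example ()) va_s0 /\ ...)
       (ensures fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ ...)
*)
// where va_lemma_Example and va_code_Example are hypothetical names.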
val va_ins_lemma (c0:va_code) (s0:va_state) : Lemma
(requires True)
(ensures True)
val eval_ocmp : s:va_state -> c:ocmp -> GTot bool
unfold let va_evalCond (b:ocmp) (s:va_state) : GTot bool = eval_ocmp s b
val valid_ocmp : c:ocmp -> s:va_state -> GTot bool
val eval_cmp_cr0 : s:va_state -> c:ocmp -> cr0_t
val lemma_cmp_eq : s:va_state -> o1:cmp_opr -> o2:cmp_opr -> Lemma
(requires True)
(ensures (eval_ocmp s (va_cmp_eq o1 o2)) <==> (va_eval_cmp_opr s o1 == va_eval_cmp_opr s o2))
[SMTPat (eval_ocmp s (va_cmp_eq o1 o2))]
val lemma_cmp_ne : s:va_state -> o1:cmp_opr -> o2:cmp_opr -> Lemma
(requires True)
(ensures (eval_ocmp s (va_cmp_ne o1 o2)) <==> (va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2))
[SMTPat (eval_ocmp s (va_cmp_ne o1 o2))]
val lemma_cmp_le : s:va_state -> o1:cmp_opr -> o2:cmp_opr -> Lemma
(requires True)
(ensures (eval_ocmp s (va_cmp_le o1 o2)) <==> (va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2))
[SMTPat (eval_ocmp s (va_cmp_le o1 o2))]
val lemma_cmp_ge : s:va_state -> o1:cmp_opr -> o2:cmp_opr -> Lemma
(requires True)
(ensures (eval_ocmp s (va_cmp_ge o1 o2)) <==> (va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2))
[SMTPat (eval_ocmp s (va_cmp_ge o1 o2))]
val lemma_cmp_lt : s:va_state -> o1:cmp_opr -> o2:cmp_opr -> Lemma
(requires True)
(ensures (eval_ocmp s (va_cmp_lt o1 o2)) <==> (va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2))
[SMTPat (eval_ocmp s (va_cmp_lt o1 o2))]
val lemma_cmp_gt : s:va_state -> o1:cmp_opr -> o2:cmp_opr -> Lemma
(requires True)
(ensures (eval_ocmp s (va_cmp_gt o1 o2)) <==> (va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2))
[SMTPat (eval_ocmp s (va_cmp_gt o1 o2))]
val lemma_valid_cmp_eq : s:va_state -> o1:cmp_opr -> o2:cmp_opr -> Lemma
(requires True)
(ensures valid_first_cmp_opr o1 ==> (valid_ocmp (va_cmp_eq o1 o2) s))
[SMTPat (valid_ocmp (va_cmp_eq o1 o2) s)]
val lemma_valid_cmp_ne : s:va_state -> o1:cmp_opr -> o2:cmp_opr -> Lemma
(requires True)
(ensures valid_first_cmp_opr o1 ==> (valid_ocmp (va_cmp_ne o1 o2) s))
[SMTPat (valid_ocmp (va_cmp_ne o1 o2) s)]
val lemma_valid_cmp_le : s:va_state -> o1:cmp_opr -> o2:cmp_opr -> Lemma
(requires True)
(ensures valid_first_cmp_opr o1 ==> (valid_ocmp (va_cmp_le o1 o2) s))
[SMTPat (valid_ocmp (va_cmp_le o1 o2) s)]
val lemma_valid_cmp_ge : s:va_state -> o1:cmp_opr -> o2:cmp_opr -> Lemma
(requires True)
(ensures valid_first_cmp_opr o1 ==> (valid_ocmp (va_cmp_ge o1 o2) s))
[SMTPat (valid_ocmp (va_cmp_ge o1 o2) s)]
val lemma_valid_cmp_lt : s:va_state -> o1:cmp_opr -> o2:cmp_opr -> Lemma
(requires True)
(ensures valid_first_cmp_opr o1 ==> (valid_ocmp (va_cmp_lt o1 o2) s))
[SMTPat (valid_ocmp (va_cmp_lt o1 o2) s)]
val lemma_valid_cmp_gt : s:va_state -> o1:cmp_opr -> o2:cmp_opr -> Lemma
(requires True)
(ensures valid_first_cmp_opr o1 ==> (valid_ocmp (va_cmp_gt o1 o2) s))
[SMTPat (valid_ocmp (va_cmp_gt o1 o2) s)]
val va_compute_merge_total (f0:va_fuel) (fM:va_fuel) : va_fuel
val va_lemma_merge_total (b0:va_codes) (s0:va_state) (f0:va_fuel) (sM:va_state) (fM:va_fuel) (sN:va_state) : Ghost va_fuel
(requires
Cons? b0 /\
eval_code (Cons?.hd b0) s0 f0 sM /\
eval_code (va_Block (Cons?.tl b0)) sM fM sN
)
(ensures (fun fN ->
fN == va_compute_merge_total f0 fM /\
eval_code (va_Block b0) s0 fN sN
))
val va_lemma_empty_total (s0:va_state) (bN:va_codes) : Ghost (va_state & va_fuel)
(requires True)
(ensures (fun (sM, fM) ->
s0 == sM /\
eval_code (va_Block []) s0 fM sM
))
val va_lemma_ifElse_total (ifb:ocmp) (ct:va_code) (cf:va_code) (s0:va_state) : Ghost (bool & va_state & va_state & va_fuel)
(requires True)
(ensures (fun (cond, sM, sN, f0) ->
cond == eval_ocmp s0 ifb /\
sM == ({s0 with cr0 = eval_cmp_cr0 s0 ifb})
))
val va_lemma_ifElseTrue_total (ifb:ocmp) (ct:va_code) (cf:va_code) (s0:va_state) (f0:va_fuel) (sM:va_state) : Lemma
(requires
valid_ocmp ifb s0 /\
eval_ocmp s0 ifb /\
eval_code ct ({s0 with cr0 = eval_cmp_cr0 s0 ifb}) f0 sM
)
(ensures
eval_code (IfElse ifb ct cf) s0 f0 sM
)
val va_lemma_ifElseFalse_total (ifb:ocmp) (ct:va_code) (cf:va_code) (s0:va_state) (f0:va_fuel) (sM:va_state) : Lemma
(requires
valid_ocmp ifb s0 /\
not (eval_ocmp s0 ifb) /\
eval_code cf ({s0 with cr0 = eval_cmp_cr0 s0 ifb}) f0 sM
)
(ensures
eval_code (IfElse ifb ct cf) s0 f0 sM
) | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_whileInv_total (b: ocmp) (c: va_code) (s0 sN: va_state) (f0: va_fuel) : prop0 | [] | Vale.PPC64LE.Decls.va_whileInv_total | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b: Vale.PPC64LE.Decls.ocmp ->
c: Vale.PPC64LE.Decls.va_code ->
s0: Vale.PPC64LE.Decls.va_state ->
sN: Vale.PPC64LE.Decls.va_state ->
f0: Vale.PPC64LE.Decls.va_fuel
-> Vale.Def.Prop_s.prop0 | {
"end_col": 53,
"end_line": 512,
"start_col": 2,
"start_line": 512
} |
Prims.GTot | val modifies_buffer_specific (b: M.buffer64) (h1 h2: vale_heap) (start last: nat) : GTot prop0 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_buffer_specific (b:M.buffer64) (h1 h2:vale_heap) (start last:nat) : GTot prop0 =
modifies_buffer b h1 h2 /\
// TODO: Consider replacing this with: modifies (loc_buffer (gsub_buffer b i len)) h1 h2
(forall (i:nat) . {:pattern (Seq.index (M.buffer_as_seq h2 b) i)}
0 <= i /\ i < buffer_length b
/\ (i < start || i > last)
==> buffer64_read b i h1
== buffer64_read b i h2) | val modifies_buffer_specific (b: M.buffer64) (h1 h2: vale_heap) (start last: nat) : GTot prop0
let modifies_buffer_specific (b: M.buffer64) (h1 h2: vale_heap) (start last: nat) : GTot prop0 = | false | null | false | modifies_buffer b h1 h2 /\
(forall (i: nat). {:pattern (Seq.index (M.buffer_as_seq h2 b) i)}
0 <= i /\ i < buffer_length b /\ (i < start || i > last) ==>
buffer64_read b i h1 == buffer64_read b i h2) | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Memory.buffer64",
"Vale.PPC64LE.Decls.vale_heap",
"Prims.nat",
"Prims.l_and",
"Vale.PPC64LE.Decls.modifies_buffer",
"Prims.l_Forall",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_LessThan",
"Vale.PPC64LE.Decls.buffer_length",
"Vale.PPC64LE.Memory.vuint64",
"Prims.op_BarBar",
"Prims.op_GreaterThan",
"Prims.eq2",
"Vale.PPC64LE.Machine_s.nat64",
"Vale.PPC64LE.Decls.buffer64_read",
"FStar.Seq.Base.index",
"Vale.PPC64LE.Memory.base_typ_as_vale_type",
"Vale.PPC64LE.Memory.buffer_as_seq",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer (#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
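// Illustrative sketch (c1, c2 : va_code are hypothetical): a straight-line block
// is assembled from the list constructors, e.g.
//   va_Block (va_CCons c1 (va_CCons c2 (va_CNil ())))
// which unfolds to Block [c1; c2].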
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp
unfold let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c
unfold let va_get_ifCond (c:va_code{IfElse? c}) : ocmp = IfElse?.ifCond c
unfold let va_get_ifTrue (c:va_code{IfElse? c}) : va_code = IfElse?.ifTrue c
unfold let va_get_ifFalse (c:va_code{IfElse? c}) : va_code = IfElse?.ifFalse c
unfold let va_get_whileCond (c:va_code{While? c}) : ocmp = While?.whileCond c
unfold let va_get_whileBody (c:va_code{While? c}) : va_code = While?.whileBody c
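// Illustrative sketch (c1, c2, body : va_code and cond : ocmp are hypothetical):
// the va_get_* accessors invert the constructors above, e.g.
//   va_get_block (va_Block [c1; c2]) == [c1; c2]
//   va_get_whileBody (va_While cond body) == body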
// Map syntax
// syntax for map accesses, m.[key] and m.[key] <- value
type map (key:eqtype) (value:Type) = Map.t key value
let (.[]) = Map.sel
let (.[]<-) = Map.upd
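// Illustrative sketch (m : map int nat is hypothetical): with these operators,
// m.[k] abbreviates Map.sel m k and m.[k] <- v abbreviates Map.upd m k v, so
//   (m.[0] <- 1).[0] == 1
// by the usual select-after-update property of FStar.Map.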
(** Memory framing **)
(*
unfold let in_mem (addr:int) (m:mem) : bool = m `Map.contains` addr
let disjoint (ptr1:int) (num_bytes1:int) (ptr2:int) (num_bytes2:int) =
ptr1 + num_bytes1 <= ptr2 \/ ptr2 + num_bytes2 <= ptr1
let validSrcAddrs (mem:mem) (addr:int) (size:int) (num_bytes:int) =
size == 64 /\
(forall (a:int) . {:pattern (mem `Map.contains` a)} addr <= a && a < addr+num_bytes && (a - addr) % 8 = 0 ==> mem `Map.contains` a)
let memModified (old_mem:mem) (new_mem:mem) (ptr:int) (num_bytes) =
(forall (a:int) . {:pattern (new_mem `Map.contains` a)} old_mem `Map.contains` a <==> new_mem `Map.contains` a) /\
  (forall (a:int) . {:pattern (new_mem.[a]) \/ Map.sel new_mem a} a < ptr || a >= ptr + num_bytes ==> old_mem.[a] == new_mem.[a])
*)
(** Convenient memory-related functions **)
let rec buffers_readable (h: vale_heap) (l: list M.buffer64) : GTot prop0 (decreases l) =
match l with
| [] -> True
| b :: l' -> buffer_readable h b /\ buffers_readable h l'
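// Illustrative sketch (b1, b2 : M.buffer64 are hypothetical): the recursion
// unfolds to a finite conjunction, e.g. buffers_readable h [b1; b2] unfolds to
//   buffer_readable h b1 /\ (buffer_readable h b2 /\ True)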
unfold let modifies_buffer (b:M.buffer64) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2
unfold let modifies_buffer_2 (b1 b2:M.buffer64) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2
unfold let modifies_buffer_3 (b1 b2 b3:M.buffer64) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (M.loc_union (loc_buffer b2) (loc_buffer b3))) h1 h2
unfold let modifies_buffer128 (b:M.buffer128) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2
unfold let modifies_buffer128_2 (b1 b2:M.buffer128) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2
unfold let modifies_buffer128_3 (b1 b2 b3:M.buffer128) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (M.loc_union (loc_buffer b2) (loc_buffer b3))) h1 h2
let validSrcAddrs (#t:base_typ) (h:vale_heap) (addr:int) (b:M.buffer t) (len:int) (layout:vale_heap_layout) (tn:taint) =
buffer_readable h b /\
len <= buffer_length b /\
M.buffer_addr b h == addr /\
M.valid_layout_buffer_id t b layout (M.get_heaplet_id h) false /\
M.valid_taint_buf b h layout.vl_taint tn
let validDstAddrs (#t:base_typ) (h:vale_heap) (addr:int) (b:M.buffer t) (len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h addr b len layout tn /\
M.valid_layout_buffer_id t b layout (M.get_heaplet_id h) true /\
buffer_writeable b
let validSrcAddrs64 (h:vale_heap) (addr:int) (b:M.buffer64) (len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h addr b len layout tn
let validDstAddrs64 (h:vale_heap) (addr:int) (b:M.buffer64) (len:int) (layout:vale_heap_layout) (tn:taint) =
validDstAddrs h addr b len layout tn
let validSrcAddrs128 (h:vale_heap) (addr:int) (b:M.buffer128) (len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h addr b len layout tn
let validDstAddrs128 (h:vale_heap) (addr:int) (b:M.buffer128) (len:int) (layout:vale_heap_layout) (tn:taint) =
validDstAddrs h addr b len layout tn
let validSrcAddrsOffset128 (h:vale_heap) (addr:int) (b:M.buffer128) (offset len:int) (layout:vale_heap_layout) (tn:taint) =
validSrcAddrs h (addr - 16 * offset) b (len + offset) layout tn
let validDstAddrsOffset128 (h:vale_heap) (addr:int) (b:M.buffer128) (offset len:int) (layout:vale_heap_layout) (tn:taint) =
validDstAddrs h (addr - 16 * offset) b (len + offset) layout tn
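// Illustrative sketch (the concrete offset 2 is chosen only for the example):
// the Offset128 variants move the base address back by 16 bytes per skipped
// element, so
//   validSrcAddrsOffset128 h addr b 2 len layout tn
// unfolds to
//   validSrcAddrs h (addr - 32) b (len + 2) layout tn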
let modifies_buffer_specific128 (b:M.buffer128) (h1 h2:vale_heap) (start last:nat) : GTot prop0 =
modifies_buffer128 b h1 h2 /\
// TODO: Consider replacing this with: modifies (loc_buffer (gsub_buffer b i len)) h1 h2
(forall (i:nat) . {:pattern (Seq.index (M.buffer_as_seq h2 b) i)}
0 <= i /\ i < buffer_length b
/\ (i < start || i > last)
==> buffer128_read b i h1
== buffer128_read b i h2)
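// Illustrative sketch (start = 1, last = 2 chosen for the example): only indices
// inside [start, last] may change, so for any b with buffer_length b >= 1,
//   modifies_buffer_specific128 b h1 h2 1 2
// implies buffer128_read b 0 h1 == buffer128_read b 0 h2, since index 0 lies
// outside the window.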
let buffer_modifies_specific128 (b:M.buffer128) (h1 h2:vale_heap) (start last:nat) : GTot prop0 =
// TODO: Consider replacing this with: modifies (loc_buffer (gsub_buffer b i len)) h1 h2
(forall (i:nat) . {:pattern (Seq.index (M.buffer_as_seq h2 b) i)}
0 <= i /\ i < buffer_length b
/\ (i < start || i > last)
==> buffer128_read b i h1
== buffer128_read b i h2) | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_buffer_specific (b: M.buffer64) (h1 h2: vale_heap) (start last: nat) : GTot prop0 | [] | Vale.PPC64LE.Decls.modifies_buffer_specific | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b: Vale.PPC64LE.Memory.buffer64 ->
h1: Vale.PPC64LE.Decls.vale_heap ->
h2: Vale.PPC64LE.Decls.vale_heap ->
start: Prims.nat ->
last: Prims.nat
-> Prims.GTot Vale.Def.Prop_s.prop0 | {
"end_col": 45,
"end_line": 350,
"start_col": 4,
"start_line": 344
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_update = Map.upd | let va_update = | false | null | false | Map.upd | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.eqtype",
"FStar.Map.upd",
"FStar.Map.t"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_update : _: FStar.Map.t _ _ -> _: _ -> _: _ -> FStar.Map.t _ _ | [] | Vale.PPC64LE.Decls.va_update | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | _: FStar.Map.t _ _ -> _: _ -> _: _ -> FStar.Map.t _ _ | {
"end_col": 30,
"end_line": 37,
"start_col": 23,
"start_line": 37
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let vale_full_heap = M.vale_full_heap | let vale_full_heap = | false | null | false | M.vale_full_heap | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Memory.vale_full_heap"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val vale_full_heap : Type | [] | Vale.PPC64LE.Decls.vale_full_heap | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type | {
"end_col": 44,
"end_line": 27,
"start_col": 28,
"start_line": 27
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_operand_heaplet = heaplet_id | let va_operand_heaplet = | false | null | false | heaplet_id | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.heaplet_id"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_operand_heaplet : Type0 | [] | Vale.PPC64LE.Decls.va_operand_heaplet | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 42,
"end_line": 66,
"start_col": 32,
"start_line": 66
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_code = precode ins ocmp | let va_code = | false | null | false | precode ins ocmp | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.precode",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.ocmp"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0 | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_code : Type0 | [] | Vale.PPC64LE.Decls.va_code | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 37,
"end_line": 56,
"start_col": 21,
"start_line": 56
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x' | let va_reveal_eq (#ax: Type) (s: string) (x x': ax) = | false | null | false | norm [zeta; delta_only [s]] #ax x == x' | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.string",
"Prims.eq2",
"FStar.Pervasives.norm",
"Prims.Cons",
"FStar.Pervasives.norm_step",
"FStar.Pervasives.zeta",
"FStar.Pervasives.delta_only",
"Prims.Nil",
"Prims.logical"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_reveal_eq : s: Prims.string -> x: ax -> x': ax -> Prims.logical | [] | Vale.PPC64LE.Decls.va_reveal_eq | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Prims.string -> x: ax -> x': ax -> Prims.logical | {
"end_col": 109,
"end_line": 43,
"start_col": 70,
"start_line": 43
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2} | let va_int_range (k1 k2: int) = | false | null | false | i: int{k1 <= i /\ i <= k2} | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.int",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k} | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_int_range : k1: Prims.int -> k2: Prims.int -> Type0 | [] | Vale.PPC64LE.Decls.va_int_range | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | k1: Prims.int -> k2: Prims.int -> Type0 | {
"end_col": 56,
"end_line": 53,
"start_col": 31,
"start_line": 53
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_operand_Mem64 = maddr | let va_operand_Mem64 = | false | null | false | maddr | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.maddr"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_operand_Mem64 : Type0 | [] | Vale.PPC64LE.Decls.va_operand_Mem64 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 35,
"end_line": 63,
"start_col": 30,
"start_line": 63
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let reg_opr = reg | let reg_opr = | false | null | false | reg | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.reg"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val reg_opr : Type0 | [] | Vale.PPC64LE.Decls.reg_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 24,
"end_line": 61,
"start_col": 21,
"start_line": 61
} |
|
Prims.Tot | val va_mul_nat (x y: nat) : nat | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y | val va_mul_nat (x y: nat) : nat
let va_mul_nat (x y: nat) : nat = | false | null | false | mul_nat_helper x y;
x * y | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.nat",
"FStar.Mul.op_Star",
"Prims.unit",
"Vale.PPC64LE.Decls.mul_nat_helper"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0) | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_mul_nat (x y: nat) : nat | [] | Vale.PPC64LE.Decls.va_mul_nat | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Prims.nat -> y: Prims.nat -> Prims.nat | {
"end_col": 7,
"end_line": 76,
"start_col": 2,
"start_line": 75
} |
Prims.Tot | val va_expand_state (s: state) : state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_expand_state (s:state) : state = s | val va_expand_state (s: state) : state
let va_expand_state (s: state) : state = | false | null | false | s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.State.state"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_expand_state (s: state) : state | [] | Vale.PPC64LE.Decls.va_expand_state | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.State.state -> Vale.PPC64LE.State.state | {
"end_col": 60,
"end_line": 78,
"start_col": 59,
"start_line": 78
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_state = state | let va_state = | false | null | false | state | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.State.state"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_state : Type | [] | Vale.PPC64LE.Decls.va_state | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type | {
"end_col": 27,
"end_line": 59,
"start_col": 22,
"start_line": 59
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_int_at_most (k:int) = i:int{i <= k} | let va_int_at_most (k: int) = | false | null | false | i: int{i <= k} | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.int",
"Prims.b2t",
"Prims.op_LessThanOrEqual"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_int_at_most : k: Prims.int -> Type0 | [] | Vale.PPC64LE.Decls.va_int_at_most | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | k: Prims.int -> Type0 | {
"end_col": 42,
"end_line": 52,
"start_col": 29,
"start_line": 52
} |
|
Prims.Tot | val valid_dst_addr (#t: M.base_typ) (m: vale_heap) (b: M.buffer t) (i: int) : prop0 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i | val valid_dst_addr (#t: M.base_typ) (m: vale_heap) (b: M.buffer t) (i: int) : prop0
let valid_dst_addr (#t: M.base_typ) (m: vale_heap) (b: M.buffer t) (i: int) : prop0 = | false | null | false | M.valid_buffer_write m b i | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer",
"Prims.int",
"Vale.PPC64LE.Memory.valid_buffer_write",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val valid_dst_addr (#t: M.base_typ) (m: vale_heap) (b: M.buffer t) (i: int) : prop0 | [] | Vale.PPC64LE.Decls.valid_dst_addr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | m: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer t -> i: Prims.int
-> Vale.Def.Prop_s.prop0 | {
"end_col": 115,
"end_line": 89,
"start_col": 89,
"start_line": 89
} |
Prims.GTot | val buffer128_read (b: M.buffer128) (i: int) (h: vale_heap) : GTot quad32 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h | val buffer128_read (b: M.buffer128) (i: int) (h: vale_heap) : GTot quad32
let buffer128_read (b: M.buffer128) (i: int) (h: vale_heap) : GTot quad32 = | false | null | false | M.buffer_read b i h | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Memory.buffer128",
"Prims.int",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer_read",
"Vale.PPC64LE.Memory.vuint128",
"Vale.PPC64LE.Machine_s.quad32"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buffer128_read (b: M.buffer128) (i: int) (h: vale_heap) : GTot quad32 | [] | Vale.PPC64LE.Decls.buffer128_read | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b: Vale.PPC64LE.Memory.buffer128 -> i: Prims.int -> h: Vale.PPC64LE.Decls.vale_heap
-> Prims.GTot Vale.PPC64LE.Machine_s.quad32 | {
"end_col": 99,
"end_line": 91,
"start_col": 80,
"start_line": 91
} |
Prims.GTot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b | let buffer_length (#t: M.base_typ) (b: M.buffer t) = | false | null | false | M.buffer_length #t b | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.PPC64LE.Memory.buffer",
"Vale.PPC64LE.Memory.buffer_length",
"Prims.nat"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buffer_length : b: Vale.PPC64LE.Memory.buffer t -> Prims.GTot Prims.nat | [] | Vale.PPC64LE.Decls.buffer_length | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b: Vale.PPC64LE.Memory.buffer t -> Prims.GTot Prims.nat | {
"end_col": 78,
"end_line": 82,
"start_col": 58,
"start_line": 82
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_operand_vec_opr = vec | let va_operand_vec_opr = | false | null | false | vec | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.vec"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_operand_vec_opr : Type0 | [] | Vale.PPC64LE.Decls.va_operand_vec_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 35,
"end_line": 65,
"start_col": 32,
"start_line": 65
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_int_at_least (k:int) = i:int{i >= k} | let va_int_at_least (k: int) = | false | null | false | i: int{i >= k} | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.int",
"Prims.b2t",
"Prims.op_GreaterThanOrEqual"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y () | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_int_at_least : k: Prims.int -> Type0 | [] | Vale.PPC64LE.Decls.va_int_at_least | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | k: Prims.int -> Type0 | {
"end_col": 43,
"end_line": 51,
"start_col": 30,
"start_line": 51
} |
|
Prims.GTot | val buffer_readable (#t: M.base_typ) (h: vale_heap) (b: M.buffer t) : GTot prop0 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b | val buffer_readable (#t: M.base_typ) (h: vale_heap) (b: M.buffer t) : GTot prop0
let buffer_readable (#t: M.base_typ) (h: vale_heap) (b: M.buffer t) : GTot prop0 = | false | null | false | M.buffer_readable #t h b | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer",
"Vale.PPC64LE.Memory.buffer_readable",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buffer_readable (#t: M.base_typ) (h: vale_heap) (b: M.buffer t) : GTot prop0 | [] | Vale.PPC64LE.Decls.buffer_readable | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | h: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer t
-> Prims.GTot Vale.Def.Prop_s.prop0 | {
"end_col": 111,
"end_line": 80,
"start_col": 87,
"start_line": 80
} |
Prims.GTot | val buffer_writeable (#t: M.base_typ) (b: M.buffer t) : GTot prop0 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b | val buffer_writeable (#t: M.base_typ) (b: M.buffer t) : GTot prop0
let buffer_writeable (#t: M.base_typ) (b: M.buffer t) : GTot prop0 = | false | null | false | M.buffer_writeable #t b | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.PPC64LE.Memory.buffer",
"Vale.PPC64LE.Memory.buffer_writeable",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buffer_writeable (#t: M.base_typ) (b: M.buffer t) : GTot prop0 | [] | Vale.PPC64LE.Decls.buffer_writeable | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b: Vale.PPC64LE.Memory.buffer t -> Prims.GTot Vale.Def.Prop_s.prop0 | {
"end_col": 97,
"end_line": 81,
"start_col": 74,
"start_line": 81
} |
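The buffer_writeable record above, together with buffer_readable and buffer_length from the same interface, gives the basic ghost predicates over Vale buffers. A minimal sketch of how they are typically combined in a precondition follows; the name example_buffer_pre and its body are assumptions for illustration, not definitions from Vale.PPC64LE.Decls.

let example_buffer_pre (h:vale_heap) (b:M.buffer64) : GTot prop0 =
  // b is readable in heap h, may be written, and has at least one 64-bit slot
  buffer_readable h b /\ buffer_writeable b /\ buffer_length b >= 1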
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_operand_reg_opr = reg | let va_operand_reg_opr = | false | null | false | reg | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.reg"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0 | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_operand_reg_opr : Type0 | [] | Vale.PPC64LE.Decls.va_operand_reg_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 35,
"end_line": 62,
"start_col": 32,
"start_line": 62
} |
|
Prims.Tot | val valid_stack128 (m: maddr) (t: taint) (s: state) : prop0 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint | val valid_stack128 (m: maddr) (t: taint) (s: state) : prop0
let valid_stack128 (m: maddr) (t: taint) (s: state) : prop0 = | false | null | false | SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.maddr",
"Vale.Arch.HeapTypes_s.taint",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Stack_i.valid_taint_stack128",
"Vale.PPC64LE.State.eval_maddr",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr] | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val valid_stack128 (m: maddr) (t: taint) (s: state) : prop0 | [] | Vale.PPC64LE.Decls.valid_stack128 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | m: Vale.PPC64LE.Machine_s.maddr -> t: Vale.Arch.HeapTypes_s.taint -> s: Vale.PPC64LE.State.state
-> Vale.Def.Prop_s.prop0 | {
"end_col": 60,
"end_line": 128,
"start_col": 2,
"start_line": 128
} |
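valid_stack128 above names a 16-byte stack slot by a maddr, i.e. a base register plus a literal offset, and checks its taint against ms_stackTaint. A small hypothetical wrapper (assumed for illustration only, not part of the interface) showing the usual way such a slot is written down:

let example_stack128_ok (r:reg) (ofs:int) (t:taint) (s:state) : prop0 =
  // the 16-byte stack slot at address r + ofs carries taint t in s
  valid_stack128 ({ address = r; offset = ofs }) t s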
Prims.GTot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_union = M.loc_union | let loc_union = | false | null | false | M.loc_union | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Memory.loc_union"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_union : s1: Vale.PPC64LE.Memory.loc -> s2: Vale.PPC64LE.Memory.loc -> Prims.GTot Vale.PPC64LE.Memory.loc | [] | Vale.PPC64LE.Decls.loc_union | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s1: Vale.PPC64LE.Memory.loc -> s2: Vale.PPC64LE.Memory.loc -> Prims.GTot Vale.PPC64LE.Memory.loc | {
"end_col": 34,
"end_line": 95,
"start_col": 23,
"start_line": 95
} |
|
Prims.Tot | val valid_mem_operand64 (addr: int) (t: taint) (s_mem: vale_heap) (layout: vale_heap_layout) : prop0 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t | val valid_mem_operand64 (addr: int) (t: taint) (s_mem: vale_heap) (layout: vale_heap_layout) : prop0
let valid_mem_operand64 (addr: int) (t: taint) (s_mem: vale_heap) (layout: vale_heap_layout) : prop0 = | false | null | false | exists (b: M.buffer64) (index: int). {:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.int",
"Vale.Arch.HeapTypes_s.taint",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Prims.l_Exists",
"Vale.PPC64LE.Memory.buffer64",
"Vale.PPC64LE.Decls.valid_buf_maddr64",
"Vale.PPC64LE.Memory.valid_buffer_read",
"Vale.PPC64LE.Memory.vuint64",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val valid_mem_operand64 (addr: int) (t: taint) (s_mem: vale_heap) (layout: vale_heap_layout) : prop0 | [] | Vale.PPC64LE.Decls.valid_mem_operand64 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
addr: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint ->
s_mem: Vale.PPC64LE.Decls.vale_heap ->
layout: Vale.Arch.HeapImpl.vale_heap_layout
-> Vale.Def.Prop_s.prop0 | {
"end_col": 49,
"end_line": 110,
"start_col": 2,
"start_line": 109
} |
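valid_mem_operand64 above is an existential guarded by the SMT pattern M.valid_buffer_read s_mem b index, so it is normally established by exhibiting a concrete buffer and index through valid_buf_maddr64. The lemma below is a hypothetical sketch of that introduction step, not part of the interface; whether the empty proof goes through as written depends on the solver picking up the witness, which the pattern on the existential is designed to enable.

let example_intro_valid_mem_operand64
    (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout)
    (b:M.buffer64) (index:int)
  : Lemma
    (requires valid_buf_maddr64 addr s_mem layout b index t)
    (ensures valid_mem_operand64 addr t s_mem layout)
  = ()  // b and index serve as the witnesses for the existential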
Prims.Tot | val valid_mem_operand128 (addr: int) (t: taint) (s_mem: vale_heap) (layout: vale_heap_layout)
: prop0 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t | val valid_mem_operand128 (addr: int) (t: taint) (s_mem: vale_heap) (layout: vale_heap_layout)
: prop0
let valid_mem_operand128 (addr: int) (t: taint) (s_mem: vale_heap) (layout: vale_heap_layout)
: prop0 = | false | null | false | exists (b: M.buffer128) (index: int). {:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.int",
"Vale.Arch.HeapTypes_s.taint",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Prims.l_Exists",
"Vale.PPC64LE.Memory.buffer128",
"Vale.PPC64LE.Decls.valid_buf_maddr128",
"Vale.PPC64LE.Memory.valid_buffer_read",
"Vale.PPC64LE.Memory.vuint128",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val valid_mem_operand128 (addr: int) (t: taint) (s_mem: vale_heap) (layout: vale_heap_layout)
: prop0 | [] | Vale.PPC64LE.Decls.valid_mem_operand128 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
addr: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint ->
s_mem: Vale.PPC64LE.Decls.vale_heap ->
layout: Vale.Arch.HeapImpl.vale_heap_layout
-> Vale.Def.Prop_s.prop0 | {
"end_col": 50,
"end_line": 114,
"start_col": 2,
"start_line": 113
} |
Prims.GTot | val buffer64_read (b: M.buffer64) (i: int) (h: vale_heap) : GTot nat64 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h | val buffer64_read (b: M.buffer64) (i: int) (h: vale_heap) : GTot nat64
let buffer64_read (b: M.buffer64) (i: int) (h: vale_heap) : GTot nat64 = | false | null | false | M.buffer_read b i h | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Memory.buffer64",
"Prims.int",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer_read",
"Vale.PPC64LE.Memory.vuint64",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buffer64_read (b: M.buffer64) (i: int) (h: vale_heap) : GTot nat64 | [] | Vale.PPC64LE.Decls.buffer64_read | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b: Vale.PPC64LE.Memory.buffer64 -> i: Prims.int -> h: Vale.PPC64LE.Decls.vale_heap
-> Prims.GTot Vale.PPC64LE.Machine_s.nat64 | {
"end_col": 96,
"end_line": 90,
"start_col": 77,
"start_line": 90
} |
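buffer64_read above is total in the index, as its type shows, so callers pair it with valid_src_addr to make the returned value meaningful. A tiny assumed example, with an invented name, of reading the first word of a buffer in ghost code:

let example_read_first (h:vale_heap) (b:M.buffer64) : GTot nat64 =
  // the 64-bit word at index 0 of b in heap h; meaningful when valid_src_addr h b 0 holds
  buffer64_read b 0 h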
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let locs_disjoint = M.locs_disjoint | let locs_disjoint = | false | null | false | M.locs_disjoint | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Memory.locs_disjoint"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2 | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val locs_disjoint : ls: Prims.list Vale.PPC64LE.Memory.loc -> Vale.Def.Prop_s.prop0 | [] | Vale.PPC64LE.Decls.locs_disjoint | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | ls: Prims.list Vale.PPC64LE.Memory.loc -> Vale.Def.Prop_s.prop0 | {
"end_col": 42,
"end_line": 94,
"start_col": 27,
"start_line": 94
} |
|
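A rough usage sketch for the locs_disjoint entry above (the buffers b1 and b2 are assumed M.buffer64 values, not taken from this record, and reading the predicate as "the listed regions do not overlap" follows the usual Vale convention rather than anything shown here):

    // hypothetical framing clause in a Vale-style precondition
    locs_disjoint [loc_buffer b1; loc_buffer b2]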
Prims.Tot | val valid_buf_maddr128
(addr: int)
(s_mem: vale_heap)
(layout: vale_heap_layout)
(b: M.buffer128)
(index: int)
(t: taint)
: prop0 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index | val valid_buf_maddr128
(addr: int)
(s_mem: vale_heap)
(layout: vale_heap_layout)
(b: M.buffer128)
(index: int)
(t: taint)
: prop0
let valid_buf_maddr128
(addr: int)
(s_mem: vale_heap)
(layout: vale_heap_layout)
(b: M.buffer128)
(index: int)
(t: taint)
: prop0 = | false | null | false | valid_src_addr s_mem b index /\ M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.int",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Vale.PPC64LE.Memory.buffer128",
"Vale.Arch.HeapTypes_s.taint",
"Prims.l_and",
"Vale.PPC64LE.Decls.valid_src_addr",
"Vale.PPC64LE.Memory.vuint128",
"Vale.PPC64LE.Memory.valid_taint_buf128",
"Vale.Arch.HeapImpl.__proj__Mkvale_heap_layout__item__vl_taint",
"Prims.eq2",
"Prims.op_Addition",
"Vale.PPC64LE.Memory.buffer_addr",
"FStar.Mul.op_Star",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val valid_buf_maddr128
(addr: int)
(s_mem: vale_heap)
(layout: vale_heap_layout)
(b: M.buffer128)
(index: int)
(t: taint)
: prop0 | [] | Vale.PPC64LE.Decls.valid_buf_maddr128 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
addr: Prims.int ->
s_mem: Vale.PPC64LE.Decls.vale_heap ->
layout: Vale.Arch.HeapImpl.vale_heap_layout ->
b: Vale.PPC64LE.Memory.buffer128 ->
index: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint
-> Vale.Def.Prop_s.prop0 | {
"end_col": 44,
"end_line": 106,
"start_col": 2,
"start_line": 104
} |
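A worked instance of the address equation in valid_buf_maddr128 above, with hypothetical values (nothing in this record fixes them): if M.buffer_addr b s_mem = 0x1000 and index = 2, the constraint addr == M.buffer_addr b s_mem + 16 * index only admits

    addr = 0x1000 + 16 * 2 = 0x1020

while the companion valid_buf_maddr64, defined just before it in the same file, uses a stride of 8 and would give 0x1010 for the same index.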
Prims.Tot | val va_get_xer (s: va_state) : xer_t | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_xer (s:va_state) : xer_t = s.xer | val va_get_xer (s: va_state) : xer_t
let va_get_xer (s: va_state) : xer_t = | false | null | false | s.xer | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__xer",
"Vale.PPC64LE.Machine_s.xer_t"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_xer (s: va_state) : xer_t | [] | Vale.PPC64LE.Decls.va_get_xer | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Machine_s.xer_t | {
"end_col": 62,
"end_line": 145,
"start_col": 57,
"start_line": 145
} |
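va_get_xer is just the projection of the xer field of the state, so in specifications it is normally composed with the xer_ca / update_xer_ca vals declared earlier in the same interface. A minimal sketch, assuming some state sM is in scope (the read-back behaviour of update_xer_ca is established elsewhere in the development, not by this record):

    let carry_was_set : bool = xer_ca (va_get_xer sM)      // read the carry (CA) field of XER
    let xer' : xer_t = update_xer_ca (va_get_xer sM) true  // XER value with the carry field forced on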
Prims.GTot | val modifies_mem (s: M.loc) (h1 h2: vale_heap) : GTot prop0 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2 | val modifies_mem (s: M.loc) (h1 h2: vale_heap) : GTot prop0
let modifies_mem (s: M.loc) (h1 h2: vale_heap) : GTot prop0 = | false | null | false | M.modifies s h1 h2 | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Memory.loc",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.modifies",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_mem (s: M.loc) (h1 h2: vale_heap) : GTot prop0 | [] | Vale.PPC64LE.Decls.modifies_mem | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.Memory.loc -> h1: Vale.PPC64LE.Decls.vale_heap -> h2: Vale.PPC64LE.Decls.vale_heap
-> Prims.GTot Vale.Def.Prop_s.prop0 | {
"end_col": 85,
"end_line": 92,
"start_col": 67,
"start_line": 92
} |
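modifies_mem s h1 h2 wraps M.modifies and is the usual Vale framing relation: informally, only locations covered by s are allowed to differ between h1 and h2. A hedged sketch of a postcondition combining it with the loc_buffer abbreviation from the same interface (b, h0 and h1 are assumed names, and the concrete value 42 is purely illustrative):

    modifies_mem (loc_buffer b) h0 h1 /\   // at most the region of b changed between h0 and h1
    buffer64_read b 0 h1 == 42             // and its first 64-bit word now holds 42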
Prims.Tot | val va_op_reg_opr_reg (r: reg) : reg_opr | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_op_reg_opr_reg (r:reg) : reg_opr = r | val va_op_reg_opr_reg (r: reg) : reg_opr
let va_op_reg_opr_reg (r: reg) : reg_opr = | false | null | false | r | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.reg",
"Vale.PPC64LE.Decls.reg_opr"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_op_reg_opr_reg (r: reg) : reg_opr | [] | Vale.PPC64LE.Decls.va_op_reg_opr_reg | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | r: Vale.PPC64LE.Machine_s.reg -> Vale.PPC64LE.Decls.reg_opr | {
"end_col": 62,
"end_line": 132,
"start_col": 61,
"start_line": 132
} |
Prims.Tot | val va_get_ok (s: va_state) : bool | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_ok (s:va_state) : bool = s.ok | val va_get_ok (s: va_state) : bool
let va_get_ok (s: va_state) : bool = | false | null | false | s.ok | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ok",
"Prims.bool"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t) | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_ok (s: va_state) : bool | [] | Vale.PPC64LE.Decls.va_get_ok | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.Decls.va_state -> Prims.bool | {
"end_col": 59,
"end_line": 143,
"start_col": 55,
"start_line": 143
} |
Prims.Tot | val va_get_reg (r: reg) (s: va_state) : nat64 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s | val va_get_reg (r: reg) (s: va_state) : nat64
let va_get_reg (r: reg) (s: va_state) : nat64 = | false | null | false | eval_reg r s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.reg",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.State.eval_reg",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0 | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_reg (r: reg) (s: va_state) : nat64 | [] | Vale.PPC64LE.Decls.va_get_reg | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | r: Vale.PPC64LE.Machine_s.reg -> s: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Machine_s.nat64 | {
"end_col": 77,
"end_line": 146,
"start_col": 65,
"start_line": 146
} |
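Since va_get_reg r s unfolds to eval_reg r s, register reads in specifications are plain state lookups. A small sketch (the register number 3 and the states sM, sK are illustrative only, not taken from this record):

    va_get_reg 3 sM                      // the nat64 currently held in general-purpose register r3 of sM
    va_get_reg 3 sM == va_get_reg 3 sK   // proposition: r3 agrees in the two states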
Prims.Tot | val valid_addr_mem (r: reg) (n: int) (s: state) : prop0 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s | val valid_addr_mem (r: reg) (n: int) (s: state) : prop0
let valid_addr_mem (r: reg) (n: int) (s: state) : prop0 = | false | null | false | valid_mem ({ address = r; offset = n }) s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.reg",
"Prims.int",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.State.valid_mem",
"Vale.PPC64LE.Machine_s.Mkmaddr",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val valid_addr_mem (r: reg) (n: int) (s: state) : prop0 | [] | Vale.PPC64LE.Decls.valid_addr_mem | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | r: Vale.PPC64LE.Machine_s.reg -> n: Prims.int -> s: Vale.PPC64LE.State.state
-> Vale.Def.Prop_s.prop0 | {
"end_col": 99,
"end_line": 96,
"start_col": 62,
"start_line": 96
} |
Prims.Tot | val va_get_stackTaint (s: va_state) : M.memtaint | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint | val va_get_stackTaint (s: va_state) : M.memtaint
let va_get_stackTaint (s: va_state) : M.memtaint = | false | null | false | s.ms_stackTaint | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint",
"Vale.PPC64LE.Memory.memtaint"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_stackTaint (s: va_state) : M.memtaint | [] | Vale.PPC64LE.Decls.va_get_stackTaint | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Memory.memtaint | {
"end_col": 84,
"end_line": 152,
"start_col": 69,
"start_line": 152
} |
Prims.Tot | val va_const_cmp (n: imm16) : cmp_opr | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_const_cmp (n:imm16) : cmp_opr = CImm n | val va_const_cmp (n: imm16) : cmp_opr
let va_const_cmp (n: imm16) : cmp_opr = | false | null | false | CImm n | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.imm16",
"Vale.PPC64LE.Machine_s.CImm",
"Vale.PPC64LE.Machine_s.cmp_opr"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_const_cmp (n: imm16) : cmp_opr | [] | Vale.PPC64LE.Decls.va_const_cmp | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | n: Vale.PPC64LE.Machine_s.imm16 -> Vale.PPC64LE.Machine_s.cmp_opr | {
"end_col": 64,
"end_line": 135,
"start_col": 58,
"start_line": 135
} |
Prims.GTot | val va_eval_Mem64 (s: va_state) (m: maddr) : GTot nat64 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s | val va_eval_Mem64 (s: va_state) (m: maddr) : GTot nat64
let va_eval_Mem64 (s: va_state) (m: maddr) : GTot nat64 = | false | null | false | eval_mem (eval_maddr m s) s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.maddr",
"Vale.PPC64LE.State.eval_mem",
"Vale.PPC64LE.State.eval_maddr",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_eval_Mem64 (s: va_state) (m: maddr) : GTot nat64 | [] | Vale.PPC64LE.Decls.va_eval_Mem64 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.Decls.va_state -> m: Vale.PPC64LE.Machine_s.maddr
-> Prims.GTot Vale.PPC64LE.Machine_s.nat64 | {
"end_col": 107,
"end_line": 156,
"start_col": 80,
"start_line": 156
} |
Prims.GTot | val va_eval_reg (s: va_state) (r: reg) : GTot nat64 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s | val va_eval_reg (s: va_state) (r: reg) : GTot nat64
let va_eval_reg (s: va_state) (r: reg) : GTot nat64 = | false | null | false | eval_reg r s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.reg",
"Vale.PPC64LE.State.eval_reg",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_eval_reg (s: va_state) (r: reg) : GTot nat64 | [] | Vale.PPC64LE.Decls.va_eval_reg | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.Decls.va_state -> r: Vale.PPC64LE.Machine_s.reg
-> Prims.GTot Vale.PPC64LE.Machine_s.nat64 | {
"end_col": 93,
"end_line": 155,
"start_col": 81,
"start_line": 155
} |
Prims.GTot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b | let loc_buffer (#t: M.base_typ) (b: M.buffer t) = | false | null | false | M.loc_buffer #t b | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.PPC64LE.Memory.buffer",
"Vale.PPC64LE.Memory.loc_buffer",
"Vale.PPC64LE.Memory.loc"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_buffer : b: Vale.PPC64LE.Memory.buffer t -> Prims.GTot Vale.PPC64LE.Memory.loc | [] | Vale.PPC64LE.Decls.loc_buffer | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b: Vale.PPC64LE.Memory.buffer t -> Prims.GTot Vale.PPC64LE.Memory.loc | {
"end_col": 71,
"end_line": 93,
"start_col": 54,
"start_line": 93
} |
|
Prims.GTot | val va_eval_cmp_opr (s: va_state) (o: cmp_opr) : GTot nat64 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s | val va_eval_cmp_opr (s: va_state) (o: cmp_opr) : GTot nat64
let va_eval_cmp_opr (s: va_state) (o: cmp_opr) : GTot nat64 = | false | null | false | eval_cmp_opr o s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.cmp_opr",
"Vale.PPC64LE.State.eval_cmp_opr",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_eval_cmp_opr (s: va_state) (o: cmp_opr) : GTot nat64 | [] | Vale.PPC64LE.Decls.va_eval_cmp_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.Decls.va_state -> o: Vale.PPC64LE.Machine_s.cmp_opr
-> Prims.GTot Vale.PPC64LE.Machine_s.nat64 | {
"end_col": 105,
"end_line": 158,
"start_col": 89,
"start_line": 158
} |
Prims.GTot | val va_eval_reg_opr (s: va_state) (r: reg_opr) : GTot nat64 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s | val va_eval_reg_opr (s: va_state) (r: reg_opr) : GTot nat64
let va_eval_reg_opr (s: va_state) (r: reg_opr) : GTot nat64 = | false | null | false | eval_reg r s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.reg_opr",
"Vale.PPC64LE.State.eval_reg",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_eval_reg_opr (s: va_state) (r: reg_opr) : GTot nat64 | [] | Vale.PPC64LE.Decls.va_eval_reg_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.Decls.va_state -> r: Vale.PPC64LE.Decls.reg_opr
-> Prims.GTot Vale.PPC64LE.Machine_s.nat64 | {
"end_col": 98,
"end_line": 157,
"start_col": 86,
"start_line": 157
} |
Prims.Tot | val coerce (#b #a: Type) (x: a{a == b}) : b | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let coerce (#b #a:Type) (x:a{a == b}) : b = x | val coerce (#b #a: Type) (x: a{a == b}) : b
let coerce (#b #a: Type) (x: a{a == b}) : b = | false | null | false | x | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.eq2"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val coerce (#b #a: Type) (x: a{a == b}) : b | [] | Vale.PPC64LE.Decls.coerce | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: a{a == b} -> b | {
"end_col": 52,
"end_line": 22,
"start_col": 51,
"start_line": 22
} |
Prims.Tot | val va_if (#a: Type) (b: bool) (x: (_: unit{b} -> a)) (y: (_: unit{~b} -> a)) : a | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y () | val va_if (#a: Type) (b: bool) (x: (_: unit{b} -> a)) (y: (_: unit{~b} -> a)) : a
let va_if (#a: Type) (b: bool) (x: (_: unit{b} -> a)) (y: (_: unit{~b} -> a)) : a = | false | null | false | if b then x () else y () | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.bool",
"Prims.unit",
"Prims.b2t",
"Prims.l_not"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_if (#a: Type) (b: bool) (x: (_: unit{b} -> a)) (y: (_: unit{~b} -> a)) : a | [] | Vale.PPC64LE.Decls.va_if | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b: Prims.bool -> x: (_: Prims.unit{b} -> a) -> y: (_: Prims.unit{~b} -> a) -> a | {
"end_col": 26,
"end_line": 48,
"start_col": 2,
"start_line": 48
} |
Prims.Tot | val va_get_mem_heaplet (n: heaplet_id) (s: va_state) : vale_heap | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n | val va_get_mem_heaplet (n: heaplet_id) (s: va_state) : vale_heap
let va_get_mem_heaplet (n: heaplet_id) (s: va_state) : vale_heap = | false | null | false | Map16.sel (coerce s.ms_heap).vf_heaplets n | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.heaplet_id",
"Vale.PPC64LE.Decls.va_state",
"Vale.Lib.Map16.sel",
"Vale.Arch.HeapImpl.vale_heap",
"Vale.Arch.HeapImpl.__proj__Mkvale_full_heap__item__vf_heaplets",
"Vale.PPC64LE.Decls.coerce",
"Vale.Arch.HeapImpl.vale_full_heap",
"Vale.Arch.Heap.heap_impl",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.PPC64LE.Decls.vale_heap"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap) | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_mem_heaplet (n: heaplet_id) (s: va_state) : vale_heap | [] | Vale.PPC64LE.Decls.va_get_mem_heaplet | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | n: Vale.PPC64LE.Decls.heaplet_id -> s: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Decls.vale_heap | {
"end_col": 126,
"end_line": 150,
"start_col": 84,
"start_line": 150
} |
Prims.Tot | val va_get_mem_layout (s: va_state) : vale_heap_layout | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout | val va_get_mem_layout (s: va_state) : vale_heap_layout
let va_get_mem_layout (s: va_state) : vale_heap_layout = | false | null | false | (coerce s.ms_heap).vf_layout | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.Arch.HeapImpl.__proj__Mkvale_full_heap__item__vf_layout",
"Vale.PPC64LE.Decls.coerce",
"Vale.Arch.HeapImpl.vale_full_heap",
"Vale.Arch.Heap.heap_impl",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.Arch.HeapImpl.vale_heap_layout"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_mem_layout (s: va_state) : vale_heap_layout | [] | Vale.PPC64LE.Decls.va_get_mem_layout | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.Decls.va_state -> Vale.Arch.HeapImpl.vale_heap_layout | {
"end_col": 103,
"end_line": 149,
"start_col": 75,
"start_line": 149
} |
Prims.Ghost | val va_tl (cs: va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs | val va_tl (cs: va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs)
let va_tl (cs: va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = | false | null | false | Cons?.tl cs | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [] | [
"Vale.PPC64LE.Decls.va_codes",
"Prims.__proj__Cons__item__tl",
"Vale.PPC64LE.Decls.va_code",
"Prims.b2t",
"Prims.uu___is_Cons",
"Prims.eq2",
"Prims.list"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_tl (cs: va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) | [] | Vale.PPC64LE.Decls.va_tl | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | cs: Vale.PPC64LE.Decls.va_codes -> Prims.Ghost Vale.PPC64LE.Decls.va_codes | {
"end_col": 112,
"end_line": 58,
"start_col": 101,
"start_line": 58
} |
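A minimal usage sketch for va_tl as defined in the record above, assuming two hypothetical va_code values c1 and c2 that are not drawn from the dataset: the precondition Cons? cs holds for a two-element list, and the ensures clause identifies the result with Cons?.tl cs.

// Illustrative only; c1 and c2 are assumed values of type va_code.
let tl_of_pair (c1 c2:va_code) : GTot va_codes =
  va_tl [c1; c2] // Cons? [c1; c2] holds by construction; the ensures clause gives the tail [c2]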
Prims.Tot | val va_update_mem_layout (sM sK: va_state) : va_state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK | val va_update_mem_layout (sM sK: va_state) : va_state
let va_update_mem_layout (sM sK: va_state) : va_state = | false | null | false | va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_mem_layout",
"Vale.Arch.HeapImpl.__proj__Mkvale_full_heap__item__vf_layout",
"Vale.PPC64LE.Decls.coerce",
"Vale.Arch.HeapImpl.vale_full_heap",
"Vale.Arch.Heap.heap_impl",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_update_mem_layout (sM sK: va_state) : va_state | [] | Vale.PPC64LE.Decls.va_update_mem_layout | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Decls.va_state | {
"end_col": 135,
"end_line": 191,
"start_col": 85,
"start_line": 191
} |
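A small sketch of the framing idiom behind va_update_mem_layout in the record above (its file context notes that va_update_foo means the two states agree except on foo). The chained call below is illustrative only, for arbitrary states sM and sK; va_update_xer comes from the same file context.

// Illustrative framing sketch: take the memory layout and the xer from sM, everything else from sK.
let frame_layout_and_xer (sM sK:va_state) : va_state =
  va_update_xer sM (va_update_mem_layout sM sK)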
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s | let va_is_src_Mem64 (m: maddr) (s: va_state) = | false | null | false | valid_mem m s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.maddr",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.State.valid_mem",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_is_src_Mem64 : m: Vale.PPC64LE.Machine_s.maddr -> s: Vale.PPC64LE.Decls.va_state -> Vale.Def.Prop_s.prop0 | [] | Vale.PPC64LE.Decls.va_is_src_Mem64 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | m: Vale.PPC64LE.Machine_s.maddr -> s: Vale.PPC64LE.Decls.va_state -> Vale.Def.Prop_s.prop0 | {
"end_col": 77,
"end_line": 165,
"start_col": 64,
"start_line": 165
} |
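A sketch of how the va_is_src_Mem64 predicate from the record above is instantiated. The register r and the offset 8 are hypothetical, chosen only to form a concrete maddr; by the definition in the record, the proposition reduces to valid_mem on that address.

// Illustrative only: the operand at address r + 8 is a valid Mem64 source exactly when valid_mem holds in s.
let mem64_src_ok (r:reg) (s:va_state) : prop0 =
  va_is_src_Mem64 ({ address = r; offset = 8 }) s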
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True | let va_is_src_reg_opr (r: reg_opr) (s: va_state) = | false | null | false | True | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.reg_opr",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_True",
"Prims.logical"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_is_src_reg_opr : r: Vale.PPC64LE.Decls.reg_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | [] | Vale.PPC64LE.Decls.va_is_src_reg_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | r: Vale.PPC64LE.Decls.reg_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | {
"end_col": 72,
"end_line": 163,
"start_col": 68,
"start_line": 163
} |
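Since the record above defines va_is_src_reg_opr as True, a one-line lemma makes the triviality explicit; the names below are illustrative, not part of the dataset row.

// Register operands carry no validity side condition, so the proof is immediate.
let reg_opr_src_trivial (r:reg_opr) (s:va_state) : Lemma (va_is_src_reg_opr r s) = ()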
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True | let va_is_src_heaplet (h: heaplet_id) (s: va_state) = | false | null | false | True | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.heaplet_id",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_True",
"Prims.logical"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_is_src_heaplet : h: Vale.PPC64LE.Decls.heaplet_id -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | [] | Vale.PPC64LE.Decls.va_is_src_heaplet | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | h: Vale.PPC64LE.Decls.heaplet_id -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | {
"end_col": 75,
"end_line": 169,
"start_col": 71,
"start_line": 169
} |
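The heaplet predicate in the record above is likewise just True; what a heaplet operand actually evaluates to is given by va_eval_heaplet from the same file context. A small sketch, with h and s arbitrary:

// Illustrative: reading the heaplet named by h out of state s (va_eval_heaplet unfolds to va_get_mem_heaplet h s).
let read_heaplet (h:heaplet_id) (s:va_state) : vale_heap =
  va_eval_heaplet s h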
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s | let va_is_dst_Mem64 (m: maddr) (s: va_state) = | false | null | false | valid_mem m s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.maddr",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.State.valid_mem",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_is_dst_Mem64 : m: Vale.PPC64LE.Machine_s.maddr -> s: Vale.PPC64LE.Decls.va_state -> Vale.Def.Prop_s.prop0 | [] | Vale.PPC64LE.Decls.va_is_dst_Mem64 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | m: Vale.PPC64LE.Machine_s.maddr -> s: Vale.PPC64LE.Decls.va_state -> Vale.Def.Prop_s.prop0 | {
"end_col": 77,
"end_line": 166,
"start_col": 64,
"start_line": 166
} |
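A brief, hedged illustration of the row above (not a dataset row; the helper name mem64_operand_usable is hypothetical, and va_is_src_Mem64 is assumed from the same module): the source and destination side conditions for a Mem64 operand can be stated together, since both predicates unfold to the same validity check.
(* Sketch only: both conjuncts unfold to valid_mem m s, so this is just a definition,
   with no proof obligation. *)
let mem64_operand_usable (m:maddr) (s:va_state) : prop0 =
  va_is_src_Mem64 m s /\ va_is_dst_Mem64 m s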
|
Prims.Tot | val va_upd_reg (r: reg) (v: nat64) (s: state) : state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s | val va_upd_reg (r: reg) (v: nat64) (s: state) : state
let va_upd_reg (r: reg) (v: nat64) (s: state) : state = | false | null | false | update_reg r v s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.reg",
"Vale.PPC64LE.Machine_s.nat64",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.State.update_reg"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 } | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_upd_reg (r: reg) (v: nat64) (s: state) : state | [] | Vale.PPC64LE.Decls.va_upd_reg | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | r: Vale.PPC64LE.Machine_s.reg -> v: Vale.PPC64LE.Machine_s.nat64 -> s: Vale.PPC64LE.State.state
-> Vale.PPC64LE.State.state | {
"end_col": 81,
"end_line": 175,
"start_col": 65,
"start_line": 175
} |
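A short, hedged sketch of how the va_upd_reg updater defined in the row above composes with the other va_upd_* functions visible in its file context (the helper name upd_reg_and_ok is hypothetical, not part of the Vale sources):
(* Sketch only: each va_upd_* returns a new state with one component replaced,
   so nesting them updates several components in a single expression. *)
let upd_reg_and_ok (r:reg) (v:nat64) (s:state) : state =
  va_upd_ok true (va_upd_reg r v s)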
Prims.Tot | val va_upd_stack (stack: SI.vale_stack) (s: state) : state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) } | val va_upd_stack (stack: SI.vale_stack) (s: state) : state
let va_upd_stack (stack: SI.vale_stack) (s: state) : state = | false | null | false | { s with ms_stack = (VSS.stack_to_s stack) } | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Stack_i.vale_stack",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Machine_s.Mkstate",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ok",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__regs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__vecs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__cr0",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__xer",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.PPC64LE.Stack_Sems.stack_to_s",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state = | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_upd_stack (stack: SI.vale_stack) (s: state) : state | [] | Vale.PPC64LE.Decls.va_upd_stack | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | stack: Vale.PPC64LE.Stack_i.vale_stack -> s: Vale.PPC64LE.State.state -> Vale.PPC64LE.State.state | {
"end_col": 113,
"end_line": 181,
"start_col": 73,
"start_line": 181
} |
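A hedged illustration of the row above (the helper name reset_stack is hypothetical): va_upd_stack stores a Vale-level stack back into the machine state via VSS.stack_to_s, mirroring va_get_stack, which reads it out via VSS.stack_from_s.
(* Sketch only: writes back the stack that was just read; by definition the resulting
   ms_stack is VSS.stack_to_s (VSS.stack_from_s s.ms_stack). *)
let reset_stack (s:state) : state =
  va_upd_stack (va_get_stack s) s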
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True | let va_is_dst_reg_opr (r: reg_opr) (s: va_state) = | false | null | false | True | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.reg_opr",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_True",
"Prims.logical"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_is_dst_reg_opr : r: Vale.PPC64LE.Decls.reg_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | [] | Vale.PPC64LE.Decls.va_is_dst_reg_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | r: Vale.PPC64LE.Decls.reg_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | {
"end_col": 72,
"end_line": 164,
"start_col": 68,
"start_line": 164
} |
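A small, hedged sketch for the row above (the lemma name reg_opr_always_ok is hypothetical): because va_is_dst_reg_opr unfolds to True, register destinations carry no side condition and the lemma is discharged trivially.
let reg_opr_always_ok (r:reg_opr) (s:va_state)
  : Lemma (va_is_dst_reg_opr r s)
  = ()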
|
Prims.Tot | val va_update_xer (sM sK: va_state) : va_state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK | val va_update_xer (sM sK: va_state) : va_state
let va_update_xer (sM sK: va_state) : va_state = | false | null | false | va_upd_xer sM.xer sK | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_xer",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__xer"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_update_xer (sM sK: va_state) : va_state | [] | Vale.PPC64LE.Decls.va_update_xer | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Decls.va_state | {
"end_col": 98,
"end_line": 187,
"start_col": 78,
"start_line": 187
} |
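A hedged sketch of the framing idiom behind the row above (the helper name frame_ok_and_xer is hypothetical): each va_update_* copies one component from a modified state sM into a baseline state sK, and chaining them copies several.
(* Sketch only: the result agrees with sK everywhere except ok and xer, which are
   taken from sM. *)
let frame_ok_and_xer (sM sK:va_state) : va_state =
  va_update_xer sM (va_update_ok sM sK)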
Prims.Tot | val va_upd_vec (x: vec) (v: quad32) (s: state) : state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s | val va_upd_vec (x: vec) (v: quad32) (s: state) : state
let va_upd_vec (x: vec) (v: quad32) (s: state) : state = | false | null | false | update_vec x v s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.vec",
"Vale.PPC64LE.Machine_s.quad32",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.State.update_vec"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer } | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_upd_vec (x: vec) (v: quad32) (s: state) : state | [] | Vale.PPC64LE.Decls.va_upd_vec | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Vale.PPC64LE.Machine_s.vec -> v: Vale.PPC64LE.Machine_s.quad32 -> s: Vale.PPC64LE.State.state
-> Vale.PPC64LE.State.state | {
"end_col": 82,
"end_line": 176,
"start_col": 66,
"start_line": 176
} |
Prims.Tot | val va_upd_stackTaint (stackTaint: M.memtaint) (s: state) : state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint } | val va_upd_stackTaint (stackTaint: M.memtaint) (s: state) : state
let va_upd_stackTaint (stackTaint: M.memtaint) (s: state) : state = | false | null | false | { s with ms_stackTaint = stackTaint } | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Memory.memtaint",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Machine_s.Mkstate",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ok",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__regs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__vecs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__cr0",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__xer",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stack"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) } | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_upd_stackTaint (stackTaint: M.memtaint) (s: state) : state | [] | Vale.PPC64LE.Decls.va_upd_stackTaint | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | stackTaint: Vale.PPC64LE.Memory.memtaint -> s: Vale.PPC64LE.State.state -> Vale.PPC64LE.State.state | {
"end_col": 113,
"end_line": 182,
"start_col": 80,
"start_line": 182
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_value_reg_opr = nat64 | let va_value_reg_opr = | false | null | false | nat64 | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.nat64"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_value_reg_opr : Type0 | [] | Vale.PPC64LE.Decls.va_value_reg_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 35,
"end_line": 215,
"start_col": 30,
"start_line": 215
} |
Prims.Tot | val va_update_operand_vec_opr (x: vec) (sM sK: va_state) : va_state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK | val va_update_operand_vec_opr (x: vec) (sM sK: va_state) : va_state
let va_update_operand_vec_opr (x: vec) (sM sK: va_state) : va_state = | false | null | false | va_update_vec x sM sK | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.vec",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_update_vec"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_update_operand_vec_opr (x: vec) (sM sK: va_state) : va_state | [] | Vale.PPC64LE.Decls.va_update_operand_vec_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Vale.PPC64LE.Machine_s.vec -> sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state
-> Vale.PPC64LE.Decls.va_state | {
"end_col": 23,
"end_line": 209,
"start_col": 2,
"start_line": 209
} |
Prims.Tot | val va_update_operand_Mem64 (m: maddr) (sM sK: va_state) : va_state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK | val va_update_operand_Mem64 (m: maddr) (sM sK: va_state) : va_state
let va_update_operand_Mem64 (m: maddr) (sM sK: va_state) : va_state = | false | null | false | va_update_mem sM sK | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.maddr",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_update_mem"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer (#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
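// Illustrative use of the validity predicates (hypothetical example, not part of this
// interface): a 64-bit load from address addr with taint t is well-formed in state s when
// some buffer/index pair in the current heap backs that address.
let valid_load_example (addr:int) (t:taint) (s:state) : prop0 =
  valid_mem_operand64 addr t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout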
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo sM sK returns sK with foo replaced by sM's foo; a postcondition of the form sM == va_update_foo sM sK therefore says the two states are the same except (possibly) for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
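// Illustrative frame condition (hypothetical example, not part of this interface): a typical
// instruction postcondition relates the final state sM to the initial state sK by copying only
// the locations the instruction may write (here ok, cr0, and one register) from sM onto sK.
let va_frame_example (r:reg) (sM sK:va_state) : prop0 =
  sM == va_update_ok sM (va_update_cr0 sM (va_update_reg r sM sK))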
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_update_operand_Mem64 (m: maddr) (sM sK: va_state) : va_state | [] | Vale.PPC64LE.Decls.va_update_operand_Mem64 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
m: Vale.PPC64LE.Machine_s.maddr ->
sM: Vale.PPC64LE.Decls.va_state ->
sK: Vale.PPC64LE.Decls.va_state
-> Vale.PPC64LE.Decls.va_state | {
"end_col": 21,
"end_line": 205,
"start_col": 2,
"start_line": 205
} |
Prims.Tot | val va_update_vec (x: vec) (sM sK: va_state) : va_state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK | val va_update_vec (x: vec) (sM sK: va_state) : va_state
let va_update_vec (x: vec) (sM sK: va_state) : va_state = | false | null | false | va_upd_vec x (eval_vec x sM) sK | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.vec",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_vec",
"Vale.PPC64LE.State.eval_vec"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer (#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo sM sK returns sK with foo replaced by sM's foo; a postcondition of the form sM == va_update_foo sM sK therefore says the two states are the same except (possibly) for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_update_vec (x: vec) (sM sK: va_state) : va_state | [] | Vale.PPC64LE.Decls.va_update_vec | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Vale.PPC64LE.Machine_s.vec -> sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state
-> Vale.PPC64LE.Decls.va_state | {
"end_col": 33,
"end_line": 195,
"start_col": 2,
"start_line": 195
} |
Prims.GTot | val buffers_readable (h: vale_heap) (l: list M.buffer64) : GTot prop0 (decreases l) | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec buffers_readable (h: vale_heap) (l: list M.buffer64) : GTot prop0 (decreases l) =
match l with
| [] -> True
| b :: l' -> buffer_readable h b /\ buffers_readable h l' | val buffers_readable (h: vale_heap) (l: list M.buffer64) : GTot prop0 (decreases l)
let rec buffers_readable (h: vale_heap) (l: list M.buffer64) : GTot prop0 (decreases l) = | false | null | false | match l with
| [] -> True
| b :: l' -> buffer_readable h b /\ buffers_readable h l' | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial",
""
] | [
"Vale.PPC64LE.Decls.vale_heap",
"Prims.list",
"Vale.PPC64LE.Memory.buffer64",
"Prims.l_True",
"Prims.l_and",
"Vale.PPC64LE.Decls.buffer_readable",
"Vale.PPC64LE.Memory.vuint64",
"Vale.PPC64LE.Decls.buffers_readable",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer (#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo sM sK returns sK with foo replaced by sM's foo; a postcondition of the form sM == va_update_foo sM sK therefore says the two states are the same except (possibly) for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp
unfold let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c
unfold let va_get_ifCond (c:va_code{IfElse? c}) : ocmp = IfElse?.ifCond c
unfold let va_get_ifTrue (c:va_code{IfElse? c}) : va_code = IfElse?.ifTrue c
unfold let va_get_ifFalse (c:va_code{IfElse? c}) : va_code = IfElse?.ifFalse c
unfold let va_get_whileCond (c:va_code{While? c}) : ocmp = While?.whileCond c
unfold let va_get_whileBody (c:va_code{While? c}) : va_code = While?.whileBody c
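// Illustrative constructors usage (hypothetical sketch, not part of this interface): assembling
// structured code values from existing va_code fragments, including a while loop whose guard
// compares a register against a small immediate.
let va_block_example (c1 c2:va_code) : va_code =
  va_Block (va_CCons c1 (va_CCons c2 (va_CNil ())))
let va_while_example (r:reg) (body:va_code) : va_code =
  va_While (va_cmp_lt (va_op_cmp_reg r) (va_const_cmp 10)) body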
// Map syntax
// syntax for map accesses, m.[key] and m.[key] <- value
type map (key:eqtype) (value:Type) = Map.t key value
let (.[]) = Map.sel
let (.[]<-) = Map.upd
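// Illustrative map syntax (hypothetical example, not part of this interface): m.[k] reads key k
// and m.[k] <- v returns the map updated at k.
let map_syntax_example (m:map int nat) : map int nat = m.[0] <- (m.[0] + 1)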
(** Memory framing **)
(*
unfold let in_mem (addr:int) (m:mem) : bool = m `Map.contains` addr
let disjoint (ptr1:int) (num_bytes1:int) (ptr2:int) (num_bytes2:int) =
ptr1 + num_bytes1 <= ptr2 \/ ptr2 + num_bytes2 <= ptr1
let validSrcAddrs (mem:mem) (addr:int) (size:int) (num_bytes:int) =
size == 64 /\
(forall (a:int) . {:pattern (mem `Map.contains` a)} addr <= a && a < addr+num_bytes && (a - addr) % 8 = 0 ==> mem `Map.contains` a)
let memModified (old_mem:mem) (new_mem:mem) (ptr:int) (num_bytes) =
(forall (a:int) . {:pattern (new_mem `Map.contains` a)} old_mem `Map.contains` a <==> new_mem `Map.contains` a) /\
(forall (a:int) . {:pattern (new_mem.[a]) \/ Map.sel new_mem a} a < ptr || a >= ptr + num_bytes ==> old_mem.[a] == new_mem.[ a])
*)
(** Convenient memory-related functions **) | false | false | Vale.PPC64LE.Decls.fsti | {
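// Hypothetical helper (an expository sketch, not part of this interface): for a concrete
// two-element list, the recursive predicate buffers_readable h [b1; b2] unfolds to
// buffer_readable h b1 /\ buffer_readable h b2 /\ True; an equivalent explicit form is:
let buffers_readable_pair_example (h:vale_heap) (b1 b2:M.buffer64) : GTot prop0 =
  buffer_readable h b1 /\ buffer_readable h b2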
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buffers_readable (h: vale_heap) (l: list M.buffer64) : GTot prop0 (decreases l) | [
"recursion"
] | Vale.PPC64LE.Decls.buffers_readable | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | h: Vale.PPC64LE.Decls.vale_heap -> l: Prims.list Vale.PPC64LE.Memory.buffer64
-> Prims.GTot Vale.Def.Prop_s.prop0 | {
"end_col": 62,
"end_line": 283,
"start_col": 4,
"start_line": 281
} |
Prims.Tot | val va_update_operand_heaplet (h: heaplet_id) (sM sK: va_state) : va_state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK | val va_update_operand_heaplet (h: heaplet_id) (sM sK: va_state) : va_state
let va_update_operand_heaplet (h: heaplet_id) (sM sK: va_state) : va_state = | false | null | false | va_update_mem_heaplet h sM sK | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.heaplet_id",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_update_mem_heaplet"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer (#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo sM sK returns sK with foo replaced by sM's foo; a postcondition of the form sM == va_update_foo sM sK therefore says the two states are the same except (possibly) for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_update_operand_heaplet (h: heaplet_id) (sM sK: va_state) : va_state | [] | Vale.PPC64LE.Decls.va_update_operand_heaplet | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
h: Vale.PPC64LE.Decls.heaplet_id ->
sM: Vale.PPC64LE.Decls.va_state ->
sK: Vale.PPC64LE.Decls.va_state
-> Vale.PPC64LE.Decls.va_state | {
"end_col": 31,
"end_line": 213,
"start_col": 2,
"start_line": 213
} |
Prims.Tot | val va_CCons (hd: va_code) (tl: va_codes) : va_codes | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl | val va_CCons (hd: va_code) (tl: va_codes) : va_codes
let va_CCons (hd: va_code) (tl: va_codes) : va_codes = | false | null | false | hd :: tl | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_code",
"Vale.PPC64LE.Decls.va_codes",
"Prims.Cons"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_CCons (hd: va_code) (tl: va_codes) : va_codes | [] | Vale.PPC64LE.Decls.va_CCons | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | hd: Vale.PPC64LE.Decls.va_code -> tl: Vale.PPC64LE.Decls.va_codes -> Vale.PPC64LE.Decls.va_codes | {
"end_col": 78,
"end_line": 235,
"start_col": 72,
"start_line": 235
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_value_vec_opr = quad32 | let va_value_vec_opr = | false | null | false | quad32 | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.quad32"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_value_vec_opr : Prims.eqtype | [] | Vale.PPC64LE.Decls.va_value_vec_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Prims.eqtype | {
"end_col": 36,
"end_line": 216,
"start_col": 30,
"start_line": 216
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_value_heaplet = vale_heap | let va_value_heaplet = | false | null | false | vale_heap | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.vale_heap"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64 | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_value_heaplet : Type | [] | Vale.PPC64LE.Decls.va_value_heaplet | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type | {
"end_col": 39,
"end_line": 217,
"start_col": 30,
"start_line": 217
} |
|
Prims.Tot | val va_CNil: Prims.unit -> va_codes | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_CNil () : va_codes = [] | val va_CNil: Prims.unit -> va_codes
let va_CNil () : va_codes = | false | null | false | [] | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.unit",
"Prims.Nil",
"Vale.PPC64LE.Decls.va_code",
"Vale.PPC64LE.Decls.va_codes"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= () | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_CNil: Prims.unit -> va_codes | [] | Vale.PPC64LE.Decls.va_CNil | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | _: Prims.unit -> Vale.PPC64LE.Decls.va_codes | {
"end_col": 49,
"end_line": 234,
"start_col": 47,
"start_line": 234
} |
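A minimal usage sketch for the va_CNil constructor documented in the row above, assuming the Vale.PPC64LE.Decls declarations from its file context are in scope; example_codes, c1 and c2 are hypothetical names introduced here, and va_CCons is the companion constructor defined next to va_CNil in the same interface.
// Hypothetical sketch: assembles the two-element code list [c1; c2] using only va_CCons and va_CNil.
let example_codes (c1 c2:va_code) : va_codes =
  va_CCons c1 (va_CCons c2 (va_CNil ()))
Because both constructors are marked unfold, this normalizes to the plain list c1 :: c2 :: [].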
Prims.Tot | val va_upd_operand_heaplet (n: heaplet_id) (h: vale_heap) (s: state) : state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s | val va_upd_operand_heaplet (n: heaplet_id) (h: vale_heap) (s: state) : state
let va_upd_operand_heaplet (n: heaplet_id) (h: vale_heap) (s: state) : state = | false | null | false | va_upd_mem_heaplet n h s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.heaplet_id",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Decls.va_upd_mem_heaplet"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_upd_operand_heaplet (n: heaplet_id) (h: vale_heap) (s: state) : state | [] | Vale.PPC64LE.Decls.va_upd_operand_heaplet | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | n: Vale.PPC64LE.Decls.heaplet_id -> h: Vale.PPC64LE.Decls.vale_heap -> s: Vale.PPC64LE.State.state
-> Vale.PPC64LE.State.state | {
"end_col": 26,
"end_line": 222,
"start_col": 2,
"start_line": 222
} |
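A minimal sketch of how the heaplet wrapper documented above is used, assuming the declarations from its file context are in scope; example_upd_heaplet, n0, h0 and s0 are hypothetical names introduced here.
// Hypothetical sketch: writing heaplet h0 at id n0 through the operand-level wrapper.
// By the definition above, this yields the same state as va_upd_mem_heaplet n0 h0 s0.
let example_upd_heaplet (n0:heaplet_id) (h0:vale_heap) (s0:state) : state =
  va_upd_operand_heaplet n0 h0 s0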
Prims.Tot | val va_IfElse (ifCond: ocmp) (ifTrue ifFalse: va_code) : va_code | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse | val va_IfElse (ifCond: ocmp) (ifTrue ifFalse: va_code) : va_code
let va_IfElse (ifCond: ocmp) (ifTrue ifFalse: va_code) : va_code = | false | null | false | IfElse ifCond ifTrue ifFalse | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.ocmp",
"Vale.PPC64LE.Decls.va_code",
"Vale.PPC64LE.Machine_s.IfElse",
"Vale.PPC64LE.Decls.ins"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_IfElse (ifCond: ocmp) (ifTrue ifFalse: va_code) : va_code | [] | Vale.PPC64LE.Decls.va_IfElse | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
ifCond: Vale.PPC64LE.Decls.ocmp ->
ifTrue: Vale.PPC64LE.Decls.va_code ->
ifFalse: Vale.PPC64LE.Decls.va_code
-> Vale.PPC64LE.Decls.va_code | {
"end_col": 110,
"end_line": 239,
"start_col": 82,
"start_line": 239
} |
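A minimal sketch combining va_IfElse with the code-list constructors from the same interface, assuming those declarations are in scope; example_ifelse, c, t and f are hypothetical names introduced here.
// Hypothetical sketch: a one-element code list whose only element is the conditional
// built by va_IfElse, which simply wraps the IfElse precode constructor.
let example_ifelse (c:ocmp) (t f:va_code) : va_codes =
  va_CCons (va_IfElse c t f) (va_CNil ())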
Prims.Tot | val va_upd_operand_vec_opr (x: vec) (v: quad32) (s: state) : state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s | val va_upd_operand_vec_opr (x: vec) (v: quad32) (s: state) : state
let va_upd_operand_vec_opr (x: vec) (v: quad32) (s: state) : state = | false | null | false | va_upd_vec x v s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.vec",
"Vale.PPC64LE.Machine_s.quad32",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Decls.va_upd_vec"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_upd_operand_vec_opr (x: vec) (v: quad32) (s: state) : state | [] | Vale.PPC64LE.Decls.va_upd_operand_vec_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Vale.PPC64LE.Machine_s.vec -> v: Vale.PPC64LE.Machine_s.quad32 -> s: Vale.PPC64LE.State.state
-> Vale.PPC64LE.State.state | {
"end_col": 94,
"end_line": 220,
"start_col": 78,
"start_line": 220
} |
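A minimal sketch of the vector-operand update documented above, assuming the declarations from its file context are in scope; example_upd_vec, v0, q0 and s0 are hypothetical names introduced here.
// Hypothetical sketch: storing quad32 value q0 into vector register v0 of state s0.
// By the definition above, this yields the same state as va_upd_vec v0 q0 s0.
let example_upd_vec (v0:vec) (q0:quad32) (s0:state) : state =
  va_upd_operand_vec_opr v0 q0 s0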
Prims.Tot | val va_get_whileCond (c: va_code{While? c}) : ocmp | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_whileCond (c:va_code{While? c}) : ocmp = While?.whileCond c | val va_get_whileCond (c: va_code{While? c}) : ocmp
let va_get_whileCond (c: va_code{While? c}) : ocmp = | false | null | false | While?.whileCond c | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_code",
"Prims.b2t",
"Vale.PPC64LE.Machine_s.uu___is_While",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.ocmp",
"Vale.PPC64LE.Machine_s.__proj__While__item__whileCond"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp
unfold let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c
unfold let va_get_ifCond (c:va_code{IfElse? c}) : ocmp = IfElse?.ifCond c
unfold let va_get_ifTrue (c:va_code{IfElse? c}) : va_code = IfElse?.ifTrue c | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_whileCond (c: va_code{While? c}) : ocmp | [] | Vale.PPC64LE.Decls.va_get_whileCond | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | c: Vale.PPC64LE.Decls.va_code{While? c} -> Vale.PPC64LE.Decls.ocmp | {
"end_col": 77,
"end_line": 253,
"start_col": 59,
"start_line": 253
} |
Prims.Tot | val va_get_ifFalse (c: va_code{IfElse? c}) : va_code | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_ifFalse (c:va_code{IfElse? c}) : va_code = IfElse?.ifFalse c | val va_get_ifFalse (c: va_code{IfElse? c}) : va_code
let va_get_ifFalse (c: va_code{IfElse? c}) : va_code = | false | null | false | IfElse?.ifFalse c | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_code",
"Prims.b2t",
"Vale.PPC64LE.Machine_s.uu___is_IfElse",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.ocmp",
"Vale.PPC64LE.Machine_s.__proj__IfElse__item__ifFalse"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp
unfold let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c
unfold let va_get_ifCond (c:va_code{IfElse? c}) : ocmp = IfElse?.ifCond c | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_ifFalse (c: va_code{IfElse? c}) : va_code | [] | Vale.PPC64LE.Decls.va_get_ifFalse | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | c: Vale.PPC64LE.Decls.va_code{IfElse? c} -> Vale.PPC64LE.Decls.va_code | {
"end_col": 78,
"end_line": 252,
"start_col": 61,
"start_line": 252
} |
Prims.Tot | val va_get_block (c: va_code{Block? c}) : va_codes | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c | val va_get_block (c: va_code{Block? c}) : va_codes
let va_get_block (c: va_code{Block? c}) : va_codes = | false | null | false | Block?.block c | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_code",
"Prims.b2t",
"Vale.PPC64LE.Machine_s.uu___is_Block",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.ocmp",
"Vale.PPC64LE.Machine_s.__proj__Block__item__block",
"Vale.PPC64LE.Decls.va_codes"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_block (c: va_code{Block? c}) : va_codes | [] | Vale.PPC64LE.Decls.va_get_block | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | c: Vale.PPC64LE.Decls.va_code{Block? c} -> Vale.PPC64LE.Decls.va_codes | {
"end_col": 73,
"end_line": 249,
"start_col": 59,
"start_line": 249
} |
Prims.Tot | val va_get_whileBody (c: va_code{While? c}) : va_code | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_whileBody (c:va_code{While? c}) : va_code = While?.whileBody c | val va_get_whileBody (c: va_code{While? c}) : va_code
let va_get_whileBody (c: va_code{While? c}) : va_code = | false | null | false | While?.whileBody c | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_code",
"Prims.b2t",
"Vale.PPC64LE.Machine_s.uu___is_While",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.ocmp",
"Vale.PPC64LE.Machine_s.__proj__While__item__whileBody"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp
unfold let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c
unfold let va_get_ifCond (c:va_code{IfElse? c}) : ocmp = IfElse?.ifCond c
unfold let va_get_ifTrue (c:va_code{IfElse? c}) : va_code = IfElse?.ifTrue c
unfold let va_get_ifFalse (c:va_code{IfElse? c}) : va_code = IfElse?.ifFalse c | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_whileBody (c: va_code{While? c}) : va_code | [] | Vale.PPC64LE.Decls.va_get_whileBody | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | c: Vale.PPC64LE.Decls.va_code{While? c} -> Vale.PPC64LE.Decls.va_code | {
"end_col": 80,
"end_line": 254,
"start_col": 62,
"start_line": 254
} |
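
A minimal illustrative F* sketch, not drawn from the repository or from any dataset row, assuming the Vale.PPC64LE.Decls definitions shown in the file context above are in scope (the name example_while_projections is hypothetical). It shows how the While accessors relate to the va_While constructor: because all three are unfold definitions, the projections reduce directly to the constructor's arguments.

// Illustration only: both accessors recover the arguments passed to va_While.
let example_while_projections (cond:ocmp) (body:va_code)
  : Lemma (va_get_whileCond (va_While cond body) == cond /\
           va_get_whileBody (va_While cond body) == body)
  = ()
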
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_buffer (b:M.buffer64) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2 | let modifies_buffer (b: M.buffer64) (h1 h2: vale_heap) = | false | null | false | modifies_mem (loc_buffer b) h1 h2 | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Memory.buffer64",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Decls.modifies_mem",
"Vale.PPC64LE.Decls.loc_buffer",
"Vale.PPC64LE.Memory.vuint64",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp
unfold let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c
unfold let va_get_ifCond (c:va_code{IfElse? c}) : ocmp = IfElse?.ifCond c
unfold let va_get_ifTrue (c:va_code{IfElse? c}) : va_code = IfElse?.ifTrue c
unfold let va_get_ifFalse (c:va_code{IfElse? c}) : va_code = IfElse?.ifFalse c
unfold let va_get_whileCond (c:va_code{While? c}) : ocmp = While?.whileCond c
unfold let va_get_whileBody (c:va_code{While? c}) : va_code = While?.whileBody c
// Map syntax
// syntax for map accesses, m.[key] and m.[key] <- value
type map (key:eqtype) (value:Type) = Map.t key value
let (.[]) = Map.sel
let (.[]<-) = Map.upd
(** Memory framing **)
(*
unfold let in_mem (addr:int) (m:mem) : bool = m `Map.contains` addr
let disjoint (ptr1:int) (num_bytes1:int) (ptr2:int) (num_bytes2:int) =
ptr1 + num_bytes1 <= ptr2 \/ ptr2 + num_bytes2 <= ptr1
let validSrcAddrs (mem:mem) (addr:int) (size:int) (num_bytes:int) =
size == 64 /\
(forall (a:int) . {:pattern (mem `Map.contains` a)} addr <= a && a < addr+num_bytes && (a - addr) % 8 = 0 ==> mem `Map.contains` a)
let memModified (old_mem:mem) (new_mem:mem) (ptr:int) (num_bytes) =
(forall (a:int) . {:pattern (new_mem `Map.contains` a)} old_mem `Map.contains` a <==> new_mem `Map.contains` a) /\
(forall (a:int) . {:pattern (new_mem.[a]) \/ Map.sel new_mem a} a < ptr || a >= ptr + num_bytes ==> old_mem.[a] == new_mem.[ a])
*)
(** Convenient memory-related functions **)
let rec buffers_readable (h: vale_heap) (l: list M.buffer64) : GTot prop0 (decreases l) =
match l with
| [] -> True
| b :: l' -> buffer_readable h b /\ buffers_readable h l' | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_buffer : b: Vale.PPC64LE.Memory.buffer64 ->
h1: Vale.PPC64LE.Decls.vale_heap ->
h2: Vale.PPC64LE.Decls.vale_heap
-> Vale.Def.Prop_s.prop0 | [] | Vale.PPC64LE.Decls.modifies_buffer | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b: Vale.PPC64LE.Memory.buffer64 ->
h1: Vale.PPC64LE.Decls.vale_heap ->
h2: Vale.PPC64LE.Decls.vale_heap
-> Vale.Def.Prop_s.prop0 | {
"end_col": 95,
"end_line": 285,
"start_col": 62,
"start_line": 285
} |
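
A minimal illustrative F* sketch, not taken from the source file, assuming the definitions in the file context above are in scope (the helper name modifies_two_buffers is hypothetical). It shows the pattern behind modifies_buffer: the predicate specializes modifies_mem to one buffer's location, and the same construction extends to several buffers via loc_union.

// Illustration only: a framing predicate over the union of two buffer locations.
let modifies_two_buffers (b1 b2:M.buffer64) (h1 h2:vale_heap) : GTot prop0 =
  modifies_mem (loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2
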
|
Prims.Tot | val va_While (whileCond: ocmp) (whileBody: va_code) : va_code | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody | val va_While (whileCond: ocmp) (whileBody: va_code) : va_code
let va_While (whileCond: ocmp) (whileBody: va_code) : va_code = | false | null | false | While whileCond whileBody | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.ocmp",
"Vale.PPC64LE.Decls.va_code",
"Vale.PPC64LE.Machine_s.While",
"Vale.PPC64LE.Decls.ins"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_While (whileCond: ocmp) (whileBody: va_code) : va_code | [] | Vale.PPC64LE.Decls.va_While | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | whileCond: Vale.PPC64LE.Decls.ocmp -> whileBody: Vale.PPC64LE.Decls.va_code
-> Vale.PPC64LE.Decls.va_code | {
"end_col": 94,
"end_line": 240,
"start_col": 69,
"start_line": 240
} |
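
A minimal illustrative F* sketch, not part of the source file, assuming the code constructors from the file context above are in scope (the name example_loop is hypothetical). It shows that va_While builds an ordinary va_code value, which composes with va_CCons/va_CNil and va_Block like any other constructor.

// Illustration only: a one-statement block wrapping a while loop.
let example_loop (cond:ocmp) (body:va_code) : va_code =
  va_Block (va_CCons (va_While cond body) (va_CNil ()))
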
Prims.Tot | val va_get_ifCond (c: va_code{IfElse? c}) : ocmp | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_ifCond (c:va_code{IfElse? c}) : ocmp = IfElse?.ifCond c | val va_get_ifCond (c: va_code{IfElse? c}) : ocmp
let va_get_ifCond (c: va_code{IfElse? c}) : ocmp = | false | null | false | IfElse?.ifCond c | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_code",
"Prims.b2t",
"Vale.PPC64LE.Machine_s.uu___is_IfElse",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.ocmp",
"Vale.PPC64LE.Machine_s.__proj__IfElse__item__ifCond"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_ifCond (c: va_code{IfElse? c}) : ocmp | [] | Vale.PPC64LE.Decls.va_get_ifCond | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | c: Vale.PPC64LE.Decls.va_code{IfElse? c} -> Vale.PPC64LE.Decls.ocmp | {
"end_col": 73,
"end_line": 250,
"start_col": 57,
"start_line": 250
} |
Prims.Tot | val va_get_ifTrue (c: va_code{IfElse? c}) : va_code | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_ifTrue (c:va_code{IfElse? c}) : va_code = IfElse?.ifTrue c | val va_get_ifTrue (c: va_code{IfElse? c}) : va_code
let va_get_ifTrue (c: va_code{IfElse? c}) : va_code = | false | null | false | IfElse?.ifTrue c | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_code",
"Prims.b2t",
"Vale.PPC64LE.Machine_s.uu___is_IfElse",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.ocmp",
"Vale.PPC64LE.Machine_s.__proj__IfElse__item__ifTrue"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
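// Illustrative sketch, not part of the original module (va_if_demo is a hypothetical
// name): both branches are passed as thunks, so only the branch selected by b is
// normalized rather than both.
let va_if_demo (b:bool) : int = va_if b (fun _ -> 1) (fun _ -> 0)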
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
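// Illustrative sketch, not part of the original module (copy_r3 is a hypothetical
// name): the result agrees with sK everywhere except register 3, whose value is
// taken from sM.
let copy_r3 (sM sK:va_state) : va_state = va_update_reg 3 sM sK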
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp
unfold let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_ifTrue (c: va_code{IfElse? c}) : va_code | [] | Vale.PPC64LE.Decls.va_get_ifTrue | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | c: Vale.PPC64LE.Decls.va_code{IfElse? c} -> Vale.PPC64LE.Decls.va_code | {
"end_col": 76,
"end_line": 251,
"start_col": 60,
"start_line": 251
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_buffer_2 (b1 b2:M.buffer64) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2 | let modifies_buffer_2 (b1 b2: M.buffer64) (h1 h2: vale_heap) = | false | null | false | modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2 | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Memory.buffer64",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Decls.modifies_mem",
"Vale.PPC64LE.Memory.loc_union",
"Vale.PPC64LE.Decls.loc_buffer",
"Vale.PPC64LE.Memory.vuint64",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp
unfold let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c
unfold let va_get_ifCond (c:va_code{IfElse? c}) : ocmp = IfElse?.ifCond c
unfold let va_get_ifTrue (c:va_code{IfElse? c}) : va_code = IfElse?.ifTrue c
unfold let va_get_ifFalse (c:va_code{IfElse? c}) : va_code = IfElse?.ifFalse c
unfold let va_get_whileCond (c:va_code{While? c}) : ocmp = While?.whileCond c
unfold let va_get_whileBody (c:va_code{While? c}) : va_code = While?.whileBody c
// Map syntax
// syntax for map accesses, m.[key] and m.[key] <- value
type map (key:eqtype) (value:Type) = Map.t key value
let (.[]) = Map.sel
let (.[]<-) = Map.upd
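// Illustrative sketch, not part of the original module (map_demo is a hypothetical
// name): reading and updating a map with the m.[k] / m.[k] <- v sugar defined above.
let map_demo (m:map int bool) (k:int) : map int bool = m.[k] <- not m.[k]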
(** Memory framing **)
(*
unfold let in_mem (addr:int) (m:mem) : bool = m `Map.contains` addr
let disjoint (ptr1:int) (num_bytes1:int) (ptr2:int) (num_bytes2:int) =
ptr1 + num_bytes1 <= ptr2 \/ ptr2 + num_bytes2 <= ptr1
let validSrcAddrs (mem:mem) (addr:int) (size:int) (num_bytes:int) =
size == 64 /\
(forall (a:int) . {:pattern (mem `Map.contains` a)} addr <= a && a < addr+num_bytes && (a - addr) % 8 = 0 ==> mem `Map.contains` a)
let memModified (old_mem:mem) (new_mem:mem) (ptr:int) (num_bytes) =
(forall (a:int) . {:pattern (new_mem `Map.contains` a)} old_mem `Map.contains` a <==> new_mem `Map.contains` a) /\
(forall (a:int) . {:pattern (new_mem.[a]) \/ Map.sel new_mem a} a < ptr || a >= ptr + num_bytes ==> old_mem.[a] == new_mem.[ a])
*)
(** Convenient memory-related functions **)
let rec buffers_readable (h: vale_heap) (l: list M.buffer64) : GTot prop0 (decreases l) =
match l with
| [] -> True
| b :: l' -> buffer_readable h b /\ buffers_readable h l'
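// Illustrative sketch, not part of the original module (three_readable is a
// hypothetical name): buffers_readable folds buffer_readable over a list of buffers.
let three_readable (h:vale_heap) (b1 b2 b3:M.buffer64) : GTot prop0 =
  buffers_readable h [b1; b2; b3]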
unfold let modifies_buffer (b:M.buffer64) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2 | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_buffer_2 : b1: Vale.PPC64LE.Memory.buffer64 ->
b2: Vale.PPC64LE.Memory.buffer64 ->
h1: Vale.PPC64LE.Decls.vale_heap ->
h2: Vale.PPC64LE.Decls.vale_heap
-> Vale.Def.Prop_s.prop0 | [] | Vale.PPC64LE.Decls.modifies_buffer_2 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b1: Vale.PPC64LE.Memory.buffer64 ->
b2: Vale.PPC64LE.Memory.buffer64 ->
h1: Vale.PPC64LE.Decls.vale_heap ->
h2: Vale.PPC64LE.Decls.vale_heap
-> Vale.Def.Prop_s.prop0 | {
"end_col": 66,
"end_line": 287,
"start_col": 2,
"start_line": 287
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_buffer128 (b:M.buffer128) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2 | let modifies_buffer128 (b: M.buffer128) (h1 h2: vale_heap) = | false | null | false | modifies_mem (loc_buffer b) h1 h2 | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Memory.buffer128",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Decls.modifies_mem",
"Vale.PPC64LE.Decls.loc_buffer",
"Vale.PPC64LE.Memory.vuint128",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp
unfold let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c
unfold let va_get_ifCond (c:va_code{IfElse? c}) : ocmp = IfElse?.ifCond c
unfold let va_get_ifTrue (c:va_code{IfElse? c}) : va_code = IfElse?.ifTrue c
unfold let va_get_ifFalse (c:va_code{IfElse? c}) : va_code = IfElse?.ifFalse c
unfold let va_get_whileCond (c:va_code{While? c}) : ocmp = While?.whileCond c
unfold let va_get_whileBody (c:va_code{While? c}) : va_code = While?.whileBody c
// Map syntax
// syntax for map accesses, m.[key] and m.[key] <- value
type map (key:eqtype) (value:Type) = Map.t key value
let (.[]) = Map.sel
let (.[]<-) = Map.upd
(** Memory framing **)
(*
unfold let in_mem (addr:int) (m:mem) : bool = m `Map.contains` addr
let disjoint (ptr1:int) (num_bytes1:int) (ptr2:int) (num_bytes2:int) =
ptr1 + num_bytes1 <= ptr2 \/ ptr2 + num_bytes2 <= ptr1
let validSrcAddrs (mem:mem) (addr:int) (size:int) (num_bytes:int) =
size == 64 /\
(forall (a:int) . {:pattern (mem `Map.contains` a)} addr <= a && a < addr+num_bytes && (a - addr) % 8 = 0 ==> mem `Map.contains` a)
let memModified (old_mem:mem) (new_mem:mem) (ptr:int) (num_bytes) =
(forall (a:int) . {:pattern (new_mem `Map.contains` a)} old_mem `Map.contains` a <==> new_mem `Map.contains` a) /\
(forall (a:int) . {:pattern (new_mem.[a]) \/ Map.sel new_mem a} a < ptr || a >= ptr + num_bytes ==> old_mem.[a] == new_mem.[ a])
*)
(** Convenient memory-related functions **)
let rec buffers_readable (h: vale_heap) (l: list M.buffer64) : GTot prop0 (decreases l) =
match l with
| [] -> True
| b :: l' -> buffer_readable h b /\ buffers_readable h l'
unfold let modifies_buffer (b:M.buffer64) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2
unfold let modifies_buffer_2 (b1 b2:M.buffer64) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2
unfold let modifies_buffer_3 (b1 b2 b3:M.buffer64) (h1 h2:vale_heap) = | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_buffer128 : b: Vale.PPC64LE.Memory.buffer128 ->
h1: Vale.PPC64LE.Decls.vale_heap ->
h2: Vale.PPC64LE.Decls.vale_heap
-> Vale.Def.Prop_s.prop0 | [] | Vale.PPC64LE.Decls.modifies_buffer128 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b: Vale.PPC64LE.Memory.buffer128 ->
h1: Vale.PPC64LE.Decls.vale_heap ->
h2: Vale.PPC64LE.Decls.vale_heap
-> Vale.Def.Prop_s.prop0 | {
"end_col": 99,
"end_line": 290,
"start_col": 66,
"start_line": 290
} |
|
Prims.Tot | val from_heap_impl (heap: heap_impl) : vale_full_heap | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap | val from_heap_impl (heap: heap_impl) : vale_full_heap
let from_heap_impl (heap: heap_impl) : vale_full_heap = | false | null | false | coerce heap | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.Arch.Heap.heap_impl",
"Vale.PPC64LE.Decls.coerce",
"Vale.Arch.HeapImpl.vale_full_heap"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val from_heap_impl (heap: heap_impl) : vale_full_heap | [] | Vale.PPC64LE.Decls.from_heap_impl | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | heap: Vale.Arch.Heap.heap_impl -> Vale.Arch.HeapImpl.vale_full_heap | {
"end_col": 73,
"end_line": 24,
"start_col": 62,
"start_line": 24
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_buffer128_2 (b1 b2:M.buffer128) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2 | let modifies_buffer128_2 (b1 b2: M.buffer128) (h1 h2: vale_heap) = | false | null | false | modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2 | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Memory.buffer128",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Decls.modifies_mem",
"Vale.PPC64LE.Memory.loc_union",
"Vale.PPC64LE.Decls.loc_buffer",
"Vale.PPC64LE.Memory.vuint128",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint }
// Framing: va_update_foo means the two states are the same except for foo
[@va_qattr] unfold let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK
[@va_qattr] unfold let va_update_cr0 (sM:va_state) (sK:va_state) : va_state = va_upd_cr0 sM.cr0 sK
[@va_qattr] unfold let va_update_xer (sM:va_state) (sK:va_state) : va_state = va_upd_xer sM.xer sK
[@va_qattr] unfold let va_update_reg (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_upd_reg r (eval_reg r sM) sK
[@va_qattr] unfold let va_update_mem (sM:va_state) (sK:va_state) : va_state = va_upd_mem (coerce sM.ms_heap).vf_heap sK
[@va_qattr] unfold let va_update_mem_layout (sM:va_state) (sK:va_state) : va_state = va_upd_mem_layout (coerce sM.ms_heap).vf_layout sK
[@va_qattr] unfold let va_update_mem_heaplet (n:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_upd_mem_heaplet n (Map16.sel (coerce sM.ms_heap).vf_heaplets n) sK
[@va_qattr] unfold let va_update_vec (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_upd_vec x (eval_vec x sM) sK
[@va_qattr] unfold let va_update_stack (sM:va_state) (sK:va_state) : va_state = va_upd_stack (VSS.stack_from_s sM.ms_stack) sK
[@va_qattr] unfold let va_update_stackTaint (sM:va_state) (sK:va_state) : va_state = va_upd_stackTaint sM.ms_stackTaint sK
[@va_qattr] unfold
let va_update_operand_reg_opr (r:reg) (sM:va_state) (sK:va_state) : va_state =
va_update_reg r sM sK
[@va_qattr] unfold
let va_update_operand_Mem64 (m:maddr) (sM:va_state) (sK:va_state) : va_state =
va_update_mem sM sK
[@va_qattr] unfold
let va_update_operand_vec_opr (x:vec) (sM:va_state) (sK:va_state) : va_state =
va_update_vec x sM sK
[@va_qattr] unfold
let va_update_operand_heaplet (h:heaplet_id) (sM:va_state) (sK:va_state) : va_state =
va_update_mem_heaplet h sM sK
unfold let va_value_reg_opr = nat64
unfold let va_value_vec_opr = quad32
unfold let va_value_heaplet = vale_heap
[@va_qattr] let va_upd_operand_reg_opr (r:reg_opr) (v:nat64) (s:state) : state = va_upd_reg r v s
[@va_qattr] let va_upd_operand_vec_opr (x:vec) (v:quad32) (s:state) : state = va_upd_vec x v s
[@va_qattr] let va_upd_operand_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
va_upd_mem_heaplet n h s
let va_lemma_upd_update (sM:state) : Lemma
(
(forall (sK:state).{:pattern (va_update_xer sM sK)} va_update_xer sM sK == va_upd_xer sM.xer sK) /\
(forall (sK:state) (h:heaplet_id).{:pattern (va_update_operand_heaplet h sM sK)} va_update_operand_heaplet h sM sK == va_upd_operand_heaplet h (Map16.sel (coerce sM.ms_heap).vf_heaplets h) sK) /\
(forall (sK:state) (r:reg).{:pattern (va_update_operand_reg_opr r sM sK)} va_update_operand_reg_opr r sM sK == va_upd_operand_reg_opr r (eval_reg r sM) sK) /\
(forall (sK:state) (x:vec).{:pattern (va_update_operand_vec_opr x sM sK)} va_update_operand_vec_opr x sM sK == va_upd_operand_vec_opr x (eval_vec x sM) sK)
)
= ()
// Constructors for va_codes
[@va_qattr] unfold let va_CNil () : va_codes = []
[@va_qattr] unfold let va_CCons (hd:va_code) (tl:va_codes) : va_codes = hd::tl
// Constructors for va_code
unfold let va_Block (block:va_codes) : va_code = Block block
unfold let va_IfElse (ifCond:ocmp) (ifTrue:va_code) (ifFalse:va_code) : va_code = IfElse ifCond ifTrue ifFalse
unfold let va_While (whileCond:ocmp) (whileBody:va_code) : va_code = While whileCond whileBody
val va_cmp_eq (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ne (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_le (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_ge (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_lt (o1:cmp_opr) (o2:cmp_opr) : ocmp
val va_cmp_gt (o1:cmp_opr) (o2:cmp_opr) : ocmp
unfold let va_get_block (c:va_code{Block? c}) : va_codes = Block?.block c
unfold let va_get_ifCond (c:va_code{IfElse? c}) : ocmp = IfElse?.ifCond c
unfold let va_get_ifTrue (c:va_code{IfElse? c}) : va_code = IfElse?.ifTrue c
unfold let va_get_ifFalse (c:va_code{IfElse? c}) : va_code = IfElse?.ifFalse c
unfold let va_get_whileCond (c:va_code{While? c}) : ocmp = While?.whileCond c
unfold let va_get_whileBody (c:va_code{While? c}) : va_code = While?.whileBody c
// Map syntax
// syntax for map accesses, m.[key] and m.[key] <- value
type map (key:eqtype) (value:Type) = Map.t key value
let (.[]) = Map.sel
let (.[]<-) = Map.upd
(** Memory framing **)
(*
unfold let in_mem (addr:int) (m:mem) : bool = m `Map.contains` addr
let disjoint (ptr1:int) (num_bytes1:int) (ptr2:int) (num_bytes2:int) =
ptr1 + num_bytes1 <= ptr2 \/ ptr2 + num_bytes2 <= ptr1
let validSrcAddrs (mem:mem) (addr:int) (size:int) (num_bytes:int) =
size == 64 /\
(forall (a:int) . {:pattern (mem `Map.contains` a)} addr <= a && a < addr+num_bytes && (a - addr) % 8 = 0 ==> mem `Map.contains` a)
let memModified (old_mem:mem) (new_mem:mem) (ptr:int) (num_bytes) =
(forall (a:int) . {:pattern (new_mem `Map.contains` a)} old_mem `Map.contains` a <==> new_mem `Map.contains` a) /\
(forall (a:int) . {:pattern (new_mem.[a]) \/ Map.sel new_mem a} a < ptr || a >= ptr + num_bytes ==> old_mem.[a] == new_mem.[ a])
*)
(** Convenient memory-related functions **)
let rec buffers_readable (h: vale_heap) (l: list M.buffer64) : GTot prop0 (decreases l) =
match l with
| [] -> True
| b :: l' -> buffer_readable h b /\ buffers_readable h l'
unfold let modifies_buffer (b:M.buffer64) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2
unfold let modifies_buffer_2 (b1 b2:M.buffer64) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2
unfold let modifies_buffer_3 (b1 b2 b3:M.buffer64) (h1 h2:vale_heap) =
modifies_mem (M.loc_union (loc_buffer b1) (M.loc_union (loc_buffer b2) (loc_buffer b3))) h1 h2
unfold let modifies_buffer128 (b:M.buffer128) (h1 h2:vale_heap) = modifies_mem (loc_buffer b) h1 h2 | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_buffer128_2 : b1: Vale.PPC64LE.Memory.buffer128 ->
b2: Vale.PPC64LE.Memory.buffer128 ->
h1: Vale.PPC64LE.Decls.vale_heap ->
h2: Vale.PPC64LE.Decls.vale_heap
-> Vale.Def.Prop_s.prop0 | [] | Vale.PPC64LE.Decls.modifies_buffer128_2 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b1: Vale.PPC64LE.Memory.buffer128 ->
b2: Vale.PPC64LE.Memory.buffer128 ->
h1: Vale.PPC64LE.Decls.vale_heap ->
h2: Vale.PPC64LE.Decls.vale_heap
-> Vale.Def.Prop_s.prop0 | {
"end_col": 66,
"end_line": 292,
"start_col": 2,
"start_line": 292
} |